Dataset schema (string-length ranges and class counts per column):

| column | dtype | values |
|---|---|---|
| commit | string | lengths 40–40 |
| old_file | string | lengths 4–118 |
| new_file | string | lengths 4–118 |
| old_contents | string | lengths 0–2.94k |
| new_contents | string | lengths 1–4.43k |
| subject | string | lengths 15–444 |
| message | string | lengths 16–3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5–43.2k |
| prompt | string | lengths 17–4.58k |
| response | string | lengths 1–4.43k |
| prompt_tagged | string | lengths 58–4.62k |
| response_tagged | string | lengths 1–4.43k |
| text | string | lengths 132–7.29k |
| text_tagged | string | lengths 173–7.33k |

The last six columns are derived from the base fields: prompt is old_contents followed by subject, response repeats new_contents, prompt_tagged and response_tagged wrap the same fields in <commit_before>, <commit_msg>, and <commit_after> markers, and text / text_tagged concatenate the corresponding prompt and response pair. Each sample below is therefore shown once, with its base fields only.
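A minimal sketch of that derivation (column names are from the schema above; the exact separators between concatenated parts are an assumption, and this is not the dataset's own build script):

```python
def build_derived(row):
    """Illustrative reconstruction of the six derived columns.

    Assumes plain string concatenation; the real pipeline may join
    the parts with newlines.
    """
    row["prompt"] = row["old_contents"] + row["subject"]
    row["response"] = row["new_contents"]
    row["prompt_tagged"] = ("<commit_before>" + row["old_contents"] +
                            "<commit_msg>" + row["subject"] +
                            "<commit_after>")
    row["response_tagged"] = row["new_contents"]
    row["text"] = row["prompt"] + row["response"]
    row["text_tagged"] = row["prompt_tagged"] + row["response_tagged"]
    return row

# For the first sample below (empty old_contents), prompt_tagged becomes:
# <commit_before><commit_msg>Add utility for baud setting<commit_after>
```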
1804d75f7037398dd0ed2ee67748f666cef84c10
|
utils/setBaud.py
|
utils/setBaud.py
|
#!/usr/bin/python
"""Looks for a sensor on the bus and changes its baud rate to the one specified on the command line"""
import argparse
import minimalmodbus
import serial
from time import sleep

baudrates = [1200, 2400, 4800, 9600, 19200, 38400, 57600, 115200]

parser = argparse.ArgumentParser()
parser.add_argument('baudrate', metavar='BAUD', type=int, choices=range(0, 8), help='Current baudrate index')
parser.add_argument('newbaudrate', metavar='NEWBAUD', type=int, choices=range(0, 8), help='Baudrate index to set')
args = parser.parse_args()

ADDRESS1 = 1
BAUDRATE2 = args.newbaudrate

minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY = serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE = baudrates[args.baudrate]

def scanModbus():
    # Modbus slave addresses run from 1 to 247.
    for i in range(1, 248):
        try:
            print('Trying address: ' + str(i))
            sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=i)
            addressRead = sensor.read_register(0, functioncode=3)
            if i == addressRead:
                print('FOUND!')
                return (True, i)
        except IOError:
            print("nope...")
    return (False, 0)

# sensor.debug=True
(found, i) = scanModbus()
if found:
    print('Found sensor at address: ' + str(i))
    try:
        sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=i)
        print("Setting new baudrate: " + str(baudrates[BAUDRATE2]))
        sensor.write_register(1, value=BAUDRATE2, functioncode=6)
        minimalmodbus.BAUDRATE = baudrates[BAUDRATE2]
        sleep(0.2)
        sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=i)
        print("reading address from holding register: ")
        print(sensor.read_register(0, functioncode=3))
    except IOError:
        print("Could not change the baud rate. Check your connections")
else:
    print('No sensor on the bus found')
|
Add utility for baud setting
|
Add utility for baud setting
|
Python
|
apache-2.0
|
Miceuz/rs485-moist-sensor,Miceuz/rs485-moist-sensor
|
|
10a07ebf706c3a553e76a60d25d38eeb0372e257
|
tools/dircondenser.py
|
tools/dircondenser.py
|
#!/usr/bin/env python3

# Copyright 2018 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

'''Renames test case directories using Git from this:
1 something
3 other
3 foo
3 bar
to this:
1 something
2 other
3 foo
4 bar
This script must be run from the source root as it touches run_unittests.py.
'''

import os, sys, subprocess
from glob import glob

def get_entries():
    entries = []
    for e in glob('*'):
        if not os.path.isdir(e):
            sys.exit('Current directory must not contain any files.')
        (number, rest) = e.split(' ', 1)
        try:
            number = int(number)
        except ValueError:
            sys.exit('Dir name %s does not start with a number.' % e)
        entries.append((number, rest))
    entries.sort()
    return entries

def replace_source(sourcefile, replacements):
    with open(sourcefile, 'r') as f:
        contents = f.read()
    for old_name, new_name in replacements:
        contents = contents.replace(old_name, new_name)
    with open(sourcefile, 'w') as f:
        f.write(contents)

def condense(dirname):
    curdir = os.getcwd()
    os.chdir(dirname)
    entries = get_entries()
    replacements = []
    for _i, e in enumerate(entries):
        i = _i + 1
        if e[0] != i:
            old_name = str(e[0]) + ' ' + e[1]
            new_name = str(i) + ' ' + e[1]
            #print('git mv "%s" "%s"' % (old_name, new_name))
            subprocess.check_call(['git', 'mv', old_name, new_name])
            replacements.append((old_name, new_name))
    os.chdir(curdir)
    replace_source('run_unittests.py', replacements)
    replace_source('run_project_tests.py', replacements)

if __name__ == '__main__':
    if len(sys.argv) != 1:
        sys.exit('This script takes no arguments.')
    for d in glob('test cases/*'):
        condense(d)
|
Add script to condense test directory names.
|
Add script to condense test directory names.
|
Python
|
apache-2.0
|
pexip/meson,becm/meson,MathieuDuponchelle/meson,QuLogic/meson,pexip/meson,becm/meson,jpakkane/meson,becm/meson,pexip/meson,MathieuDuponchelle/meson,MathieuDuponchelle/meson,QuLogic/meson,jpakkane/meson,mesonbuild/meson,MathieuDuponchelle/meson,becm/meson,thiblahute/meson,QuLogic/meson,mesonbuild/meson,mesonbuild/meson,becm/meson,mesonbuild/meson,thiblahute/meson,pexip/meson,jpakkane/meson,thiblahute/meson,jeandet/meson,QuLogic/meson,becm/meson,jeandet/meson,mesonbuild/meson,thiblahute/meson,jpakkane/meson,MathieuDuponchelle/meson,becm/meson,becm/meson,jeandet/meson,jeandet/meson,jpakkane/meson,becm/meson,jpakkane/meson,thiblahute/meson,MathieuDuponchelle/meson,mesonbuild/meson,jpakkane/meson,mesonbuild/meson,pexip/meson,jpakkane/meson,thiblahute/meson,MathieuDuponchelle/meson,jeandet/meson,QuLogic/meson,mesonbuild/meson,pexip/meson,QuLogic/meson,thiblahute/meson,QuLogic/meson,jeandet/meson,jeandet/meson,pexip/meson,pexip/meson,pexip/meson,MathieuDuponchelle/meson,QuLogic/meson,thiblahute/meson,pexip/meson,jeandet/meson,mesonbuild/meson,mesonbuild/meson,thiblahute/meson,MathieuDuponchelle/meson,QuLogic/meson,becm/meson,jpakkane/meson,jeandet/meson
|
|
e4869e9e348dd5a0441edc0aca7c0298c2c892a9
|
mopidy/backends/dummy.py
|
mopidy/backends/dummy.py
|
from __future__ import unicode_literals
# Make classes previously residing here available in the old location for
# backwards compatibility with extensions targeting Mopidy < 0.18.
from mopidy.backend.dummy import * # noqa
|
Make old DummyBackend imports work
|
backend: Make old DummyBackend imports work
|
Python
|
apache-2.0
|
glogiotatidis/mopidy,vrs01/mopidy,mokieyue/mopidy,quartz55/mopidy,jmarsik/mopidy,diandiankan/mopidy,jmarsik/mopidy,dbrgn/mopidy,bencevans/mopidy,mopidy/mopidy,bencevans/mopidy,mokieyue/mopidy,tkem/mopidy,hkariti/mopidy,bencevans/mopidy,glogiotatidis/mopidy,swak/mopidy,ZenithDK/mopidy,kingosticks/mopidy,diandiankan/mopidy,abarisain/mopidy,vrs01/mopidy,jodal/mopidy,diandiankan/mopidy,jodal/mopidy,kingosticks/mopidy,mopidy/mopidy,hkariti/mopidy,pacificIT/mopidy,bacontext/mopidy,jmarsik/mopidy,ali/mopidy,ZenithDK/mopidy,ali/mopidy,jcass77/mopidy,bacontext/mopidy,rawdlite/mopidy,dbrgn/mopidy,adamcik/mopidy,pacificIT/mopidy,jodal/mopidy,bacontext/mopidy,tkem/mopidy,adamcik/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,jcass77/mopidy,quartz55/mopidy,dbrgn/mopidy,pacificIT/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,bencevans/mopidy,hkariti/mopidy,diandiankan/mopidy,glogiotatidis/mopidy,jcass77/mopidy,hkariti/mopidy,ali/mopidy,dbrgn/mopidy,liamw9534/mopidy,tkem/mopidy,pacificIT/mopidy,quartz55/mopidy,bacontext/mopidy,woutervanwijk/mopidy,adamcik/mopidy,jmarsik/mopidy,rawdlite/mopidy,swak/mopidy,rawdlite/mopidy,ZenithDK/mopidy,mokieyue/mopidy,rawdlite/mopidy,priestd09/mopidy,SuperStarPL/mopidy,tkem/mopidy,swak/mopidy,priestd09/mopidy,SuperStarPL/mopidy,priestd09/mopidy,kingosticks/mopidy,woutervanwijk/mopidy,quartz55/mopidy,liamw9534/mopidy,swak/mopidy,mokieyue/mopidy,vrs01/mopidy,mopidy/mopidy,vrs01/mopidy,abarisain/mopidy,ali/mopidy
|
|
e3d54a292576e33e1dcd2c5bd58f78de4e71682b
|
flask-index/app/main.py
|
flask-index/app/main.py
|
from flask import Flask, send_file

app = Flask(__name__)

@app.route("/hello")
def hello():
    return "Hello World from Flask (default)"

@app.route("/")
def main():
    return send_file('./static/index.html')

if __name__ == "__main__":
    app.run(host='0.0.0.0', debug=True, port=80)
|
Add default flask-index file that also sends index.html to /
|
Add default flask-index file that also sends index.html to /
|
Python
|
apache-2.0
|
tiangolo/uwsgi-nginx-flask-docker,tiangolo/uwsgi-nginx-flask-docker,tiangolo/uwsgi-nginx-flask-docker
|
|
30a2e6f39cde7d8cd19af7c783e85a50593a5e05
|
.editorconfig*.py
|
.editorconfig*.py
|
# http://editorconfig.org
root = true
[*]
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true
charset = utf-8
end_of_line = lf
[*.bat]
indent_style = tab
end_of_line = crlf
[LICENSE]
insert_final_newline = false
[Makefile]
indent_style = tab
|
Add .editorconfig.py to suggest indent amount, spaces over tabs, etc.
|
Add .editorconfig.py to suggest indent amount, spaces over tabs, etc.
|
Python
|
isc
|
nsh87/regressors
|
|
16bd993545a665f4fc127df3bd5a2b69349bdedd
|
scripts/create_user.py
|
scripts/create_user.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

#==============================================================================
# Script for creating MySQL user
#==============================================================================

import os
import sys

import mysql.connector
from mysql.connector import errorcode

sys.path.insert(1, '../src')

from config import config
from sql.tables import TABLES

if __name__ == '__main__':
    if len(sys.argv) < 3:
        print('There are not enough arguments.')
        print('Use following arguments:')
        print('\tpython {} config.ini MYSQL_ROOT_PASSWORD'.format(
            os.path.basename(__file__)))
        sys.exit(1)

    # Open connection to MySQL server and get cursor
    cnx = mysql.connector.connect(
        host=config['mysql_host'],
        user='root',
        password=config['mysql_root_pass'])
    cursor = cnx.cursor()

    # Create MySQL user
    command = '''
    CREATE USER '{}'@'{}' IDENTIFIED BY '{}';
    GRANT ALL PRIVILEGES ON *.* TO '{}'@'{}';
    FLUSH PRIVILEGES;
    '''.format(config['mysql_user'], config['mysql_host'], config['mysql_pass'],
               config['mysql_user'], config['mysql_host'])

    try:
        print("Creating user '{}' identified by {}: ".format(
            config['mysql_user'], config['mysql_pass']), end='')
        # execute() with multi=True returns an iterator of result sets;
        # it has to be consumed for every statement to actually run.
        for _ in cursor.execute(command, multi=True):
            pass
    except mysql.connector.Error as err:
        print(err.msg)
    else:
        print("OK")

    # Close cursor and connection
    cursor.close()
    cnx.close()
|
Add script to create MySQL user.
|
scripts: Add script to create MySQL user.
|
Python
|
mit
|
alberand/tserver,alberand/tserver,alberand/tserver,alberand/tserver
|
|
eb30ba0366e7a5639faaad91972df0ee7bf0a049
|
webserver/processes_web_server.py
|
webserver/processes_web_server.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function

import BaseHTTPServer
import json
import time

import psutil

HOST_NAME = '0.0.0.0'
PORT_NUMBER = 8222

class ProcessListHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_GET(s):
        s.send_response(200)
        s.send_header("Content-type", "application/json")
        s.end_headers()
        options = []
        for proc in psutil.process_iter():
            try:
                options.append(dict(key=proc.name(), value=proc.pid))
            except psutil.NoSuchProcess:
                pass
        s.wfile.write(json.dumps(dict(options=sorted(options, key=lambda p: p['value'], reverse=True))))

if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class((HOST_NAME, PORT_NUMBER), ProcessListHandler)
    print(time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print(time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER))
|
Add script for listing running processes.
|
Add script for listing running processes.
|
Python
|
mit
|
grundic/teamcity-web-parameters,grundic/teamcity-web-parameters,grundic/teamcity-web-parameters
|
|
164b860e4a44a22a1686cf6133fac6258fc97db6
|
nbgrader/tests/apps/test_nbgrader_fetch.py
|
nbgrader/tests/apps/test_nbgrader_fetch.py
|
from nbgrader.tests import run_command
from nbgrader.tests.apps.base import BaseTestApp

class TestNbGraderFetch(BaseTestApp):

    def test_help(self):
        """Does the help display without error?"""
        run_command("nbgrader fetch --help-all")
|
import os

from nbgrader.tests import run_command
from nbgrader.tests.apps.base import BaseTestApp

class TestNbGraderFetch(BaseTestApp):

    def _fetch(self, assignment, exchange, flags="", retcode=0):
        run_command(
            'nbgrader fetch abc101 {} '
            '--TransferApp.exchange_directory={} '
            '{}'.format(assignment, exchange, flags),
            retcode=retcode)

    def test_help(self):
        """Does the help display without error?"""
        run_command("nbgrader fetch --help-all")

    def test_fetch(self, exchange):
        self._copy_file("files/test.ipynb", os.path.join(exchange, "abc101/outbound/ps1/p1.ipynb"))
        self._fetch("ps1", exchange)
        assert os.path.isfile("ps1/p1.ipynb")

        # make sure it fails if the assignment already exists
        self._fetch("ps1", exchange, retcode=1)

        # make sure it fails even if the assignment is incomplete
        os.remove("ps1/p1.ipynb")
        self._fetch("ps1", exchange, retcode=1)
|
Add some basic tests for nbgrader fetch
|
Add some basic tests for nbgrader fetch
|
Python
|
bsd-3-clause
|
modulexcite/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,alope107/nbgrader,modulexcite/nbgrader,dementrock/nbgrader,alope107/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,MatKallada/nbgrader,ellisonbg/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader
|
|
a8d2d9a953cba50d6c2ccbd6653f44e26982dff0
|
river/river_plugins.py
|
river/river_plugins.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

class Plugins:
    cb_new_entries = []
    cb_download_complete = []
    p_config = {}

    def __init__(self):
        pass

    def new_entries(self):
        pass

    def download_complete(self):
        pass

    def register_plugin(self, name, path, config):
        pass
|
Add prototype for plugin handler
|
Add prototype for plugin handler
|
Python
|
mit
|
ozamosi/river,ozamosi/river
|
|
58b69493a92ada5897300a96ef934fe593ffe926
|
docs/tutorial/src/python-types/tutorial001.py
|
docs/tutorial/src/python-types/tutorial001.py
|
def get_full_name(first_name, last_name):
    full_name = first_name.title() + " " + last_name.title()
    return full_name

print(get_full_name("john", "doe"))
|
Add tutorial src for python-types
|
:memo: Add tutorial src for python-types
|
Python
|
mit
|
tiangolo/fastapi,tiangolo/fastapi,tiangolo/fastapi
|
|
e59fcfe88dfac788b6dea197d308f4f9216352f9
|
spotpy/unittests/test_objectivefunctions.py
|
spotpy/unittests/test_objectivefunctions.py
|
import unittest
from spotpy import objectivefunctions as of
import numpy as np

# https://docs.python.org/3/library/unittest.html

class TestObjectiveFunctions(unittest.TestCase):

    # How many digits to match in case of floating point answers
    tolerance = 10

    def setUp(self):
        np.random.seed(42)
        self.simulation = np.random.randn(10)
        self.evaluation = np.random.randn(10)
        print(self.simulation)
        print(self.evaluation)

    def test_bias(self):
        res = of.bias(self.evaluation, self.simulation)
        self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance)

    def test_length_mismatch_return_nan(self):
        all_funcs = of._all_functions
        for func in all_funcs:
            res = func([0], [0, 1])
            self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res))

if __name__ == '__main__':
    unittest.main()
|
Add tests for bias and length mismatch
|
Add tests for bias and length mismatch
|
Python
|
mit
|
thouska/spotpy,bees4ever/spotpy,thouska/spotpy,bees4ever/spotpy,thouska/spotpy,bees4ever/spotpy
|
|
6b5ab66b7fb3d514c05bf3cf69023b1e119e1797
|
stock_picking_list/9.0.1.0.0/post-migration.py
|
stock_picking_list/9.0.1.0.0/post-migration.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2015-Today: Odoo Community Association
# @author: Stephane LE CORNEC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openupgradelib import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
    # because path of report has changed, we need to reload it
    openupgrade.load_data(
        cr, 'stock_picking_list', 'report/report_data.xml')
|
ADD mig scripts for picking list
|
ADD mig scripts for picking list
|
Python
|
agpl-3.0
|
ingadhoc/stock
|
#
# OpenUpgrade module for Odoo
# @copyright 2015-Today: Odoo Community Association
# @author: Stephane LE CORNEC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openupgradelib import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
# because path of report has changed, we need to reload it
openupgrade.load_data(
cr, 'stock_picking_list', 'report/report_data.xml')
|
|
25e6fd9b6a17def2f3a07b83ace1e65e6f1fa40d
|
test/functional/abc_rpc_buildavalancheproof.py
|
test/functional/abc_rpc_buildavalancheproof.py
|
#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the buildavalancheproof RPC"""
from test_framework.avatools import create_coinbase_stakes
from test_framework.key import ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class BuildAvalancheProofTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-enableavalanche=1', '-avacooldown=0']]
def run_test(self):
node = self.nodes[0]
addrkey0 = node.get_deterministic_priv_key()
blockhashes = node.generatetoaddress(2, addrkey0.address)
stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)
privkey = ECKey()
privkey.generate()
proof_master = privkey.get_pubkey().get_bytes().hex()
def check_buildavalancheproof_error(
error_code, error_message, stakes):
assert_raises_rpc_error(
error_code,
error_message,
node.buildavalancheproof,
# Sequence
0,
# Expiration
0,
# Master
proof_master,
stakes,
)
good_stake = stakes[0]
self.log.info("Error cases")
negative_vout = good_stake.copy()
negative_vout['vout'] = -1
check_buildavalancheproof_error(-22,
"vout must be positive",
[negative_vout],
)
zero_height = good_stake.copy()
zero_height['height'] = 0
check_buildavalancheproof_error(-22,
"height must be positive",
[zero_height],
)
negative_height = good_stake.copy()
negative_height['height'] = -1
check_buildavalancheproof_error(-22,
"height must be positive",
[negative_height],
)
missing_amount = good_stake.copy()
del missing_amount['amount']
check_buildavalancheproof_error(-8,
"Missing amount",
[missing_amount],
)
invalid_privkey = good_stake.copy()
invalid_privkey['privatekey'] = 'foobar'
check_buildavalancheproof_error(-8,
"Invalid private key",
[invalid_privkey],
)
self.log.info("Happy path")
assert node.buildavalancheproof(0, 0, proof_master, [good_stake])
if __name__ == '__main__':
BuildAvalancheProofTest().main()
|
Add some test coverage for the buildavalancheproof RPC
|
[avalanche] Add some test coverage for the buildavalancheproof RPC
Summary:
The RPC is used everywhere in the tests but is not tested itself.
This diff does not intend to extend the safety checks but only to check for the existing errors to prevent regressions.
Test Plan:
./test/functional/test_runner.py abc_rpc_buildavalancheproof
Reviewers: #bitcoin_abc, PiRK
Reviewed By: #bitcoin_abc, PiRK
Differential Revision: https://reviews.bitcoinabc.org/D10084
|
Python
|
mit
|
Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc
|
[avalanche] Add some test coverage for the buildavalancheproof RPC
Summary:
The RPC is used everywhere in the tests but is not tested itself.
This diff does not intend to extend the safety checks but only to check for the existing errors to prevent regressions.
Test Plan:
./test/functional/test_runner.py abc_rpc_buildavalancheproof
Reviewers: #bitcoin_abc, PiRK
Reviewed By: #bitcoin_abc, PiRK
Differential Revision: https://reviews.bitcoinabc.org/D10084
|
#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the buildavalancheproof RPC"""
from test_framework.avatools import create_coinbase_stakes
from test_framework.key import ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class BuildAvalancheProofTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-enableavalanche=1', '-avacooldown=0']]
def run_test(self):
node = self.nodes[0]
addrkey0 = node.get_deterministic_priv_key()
blockhashes = node.generatetoaddress(2, addrkey0.address)
stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)
privkey = ECKey()
privkey.generate()
proof_master = privkey.get_pubkey().get_bytes().hex()
def check_buildavalancheproof_error(
error_code, error_message, stakes):
assert_raises_rpc_error(
error_code,
error_message,
node.buildavalancheproof,
# Sequence
0,
# Expiration
0,
# Master
proof_master,
stakes,
)
good_stake = stakes[0]
self.log.info("Error cases")
negative_vout = good_stake.copy()
negative_vout['vout'] = -1
check_buildavalancheproof_error(-22,
"vout must be positive",
[negative_vout],
)
zero_height = good_stake.copy()
zero_height['height'] = 0
check_buildavalancheproof_error(-22,
"height must be positive",
[zero_height],
)
negative_height = good_stake.copy()
negative_height['height'] = -1
check_buildavalancheproof_error(-22,
"height must be positive",
[negative_height],
)
missing_amount = good_stake.copy()
del missing_amount['amount']
check_buildavalancheproof_error(-8,
"Missing amount",
[missing_amount],
)
invalid_privkey = good_stake.copy()
invalid_privkey['privatekey'] = 'foobar'
check_buildavalancheproof_error(-8,
"Invalid private key",
[invalid_privkey],
)
self.log.info("Happy path")
assert node.buildavalancheproof(0, 0, proof_master, [good_stake])
if __name__ == '__main__':
BuildAvalancheProofTest().main()
|
<commit_before><commit_msg>[avalanche] Add some test coverage for the buildavalancheproof RPC
Summary:
The RPC is used everywhere in the tests but is not tested itself.
This diff does not intend to extend the safety checks but only to check for the existing errors to prevent regressions.
Test Plan:
./test/functional/test_runner.py abc_rpc_buildavalancheproof
Reviewers: #bitcoin_abc, PiRK
Reviewed By: #bitcoin_abc, PiRK
Differential Revision: https://reviews.bitcoinabc.org/D10084<commit_after>
|
#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the buildavalancheproof RPC"""
from test_framework.avatools import create_coinbase_stakes
from test_framework.key import ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class BuildAvalancheProofTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-enableavalanche=1', '-avacooldown=0']]
def run_test(self):
node = self.nodes[0]
addrkey0 = node.get_deterministic_priv_key()
blockhashes = node.generatetoaddress(2, addrkey0.address)
stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)
privkey = ECKey()
privkey.generate()
proof_master = privkey.get_pubkey().get_bytes().hex()
def check_buildavalancheproof_error(
error_code, error_message, stakes):
assert_raises_rpc_error(
error_code,
error_message,
node.buildavalancheproof,
# Sequence
0,
# Expiration
0,
# Master
proof_master,
stakes,
)
good_stake = stakes[0]
self.log.info("Error cases")
negative_vout = good_stake.copy()
negative_vout['vout'] = -1
check_buildavalancheproof_error(-22,
"vout must be positive",
[negative_vout],
)
zero_height = good_stake.copy()
zero_height['height'] = 0
check_buildavalancheproof_error(-22,
"height must be positive",
[zero_height],
)
negative_height = good_stake.copy()
negative_height['height'] = -1
check_buildavalancheproof_error(-22,
"height must be positive",
[negative_height],
)
missing_amount = good_stake.copy()
del missing_amount['amount']
check_buildavalancheproof_error(-8,
"Missing amount",
[missing_amount],
)
invalid_privkey = good_stake.copy()
invalid_privkey['privatekey'] = 'foobar'
check_buildavalancheproof_error(-8,
"Invalid private key",
[invalid_privkey],
)
self.log.info("Happy path")
assert node.buildavalancheproof(0, 0, proof_master, [good_stake])
if __name__ == '__main__':
BuildAvalancheProofTest().main()
|
[avalanche] Add some test coverage for the buildavalancheproof RPC
Summary:
The RPC is used everywhere in the tests but is not tested itself.
This diff does not intend to extend the safety checks but only to check for the existing errors to prevent regressions.
Test Plan:
./test/functional/test_runner.py abc_rpc_buildavalancheproof
Reviewers: #bitcoin_abc, PiRK
Reviewed By: #bitcoin_abc, PiRK
Differential Revision: https://reviews.bitcoinabc.org/D10084#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the buildavalancheproof RPC"""
from test_framework.avatools import create_coinbase_stakes
from test_framework.key import ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class BuildAvalancheProofTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-enableavalanche=1', '-avacooldown=0']]
def run_test(self):
node = self.nodes[0]
addrkey0 = node.get_deterministic_priv_key()
blockhashes = node.generatetoaddress(2, addrkey0.address)
stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)
privkey = ECKey()
privkey.generate()
proof_master = privkey.get_pubkey().get_bytes().hex()
def check_buildavalancheproof_error(
error_code, error_message, stakes):
assert_raises_rpc_error(
error_code,
error_message,
node.buildavalancheproof,
# Sequence
0,
# Expiration
0,
# Master
proof_master,
stakes,
)
good_stake = stakes[0]
self.log.info("Error cases")
negative_vout = good_stake.copy()
negative_vout['vout'] = -1
check_buildavalancheproof_error(-22,
"vout must be positive",
[negative_vout],
)
zero_height = good_stake.copy()
zero_height['height'] = 0
check_buildavalancheproof_error(-22,
"height must be positive",
[zero_height],
)
negative_height = good_stake.copy()
negative_height['height'] = -1
check_buildavalancheproof_error(-22,
"height must be positive",
[negative_height],
)
missing_amount = good_stake.copy()
del missing_amount['amount']
check_buildavalancheproof_error(-8,
"Missing amount",
[missing_amount],
)
invalid_privkey = good_stake.copy()
invalid_privkey['privatekey'] = 'foobar'
check_buildavalancheproof_error(-8,
"Invalid private key",
[invalid_privkey],
)
self.log.info("Happy path")
assert node.buildavalancheproof(0, 0, proof_master, [good_stake])
if __name__ == '__main__':
BuildAvalancheProofTest().main()
|
<commit_before><commit_msg>[avalanche] Add some test coverage for the buildavalancheproof RPC
Summary:
The RPC is used everywhere in the tests but is not tested itself.
This diff does not intend to extend the safety checks but only to check for the existing errors to prevent regressions.
Test Plan:
./test/functional/test_runner.py abc_rpc_buildavalancheproof
Reviewers: #bitcoin_abc, PiRK
Reviewed By: #bitcoin_abc, PiRK
Differential Revision: https://reviews.bitcoinabc.org/D10084<commit_after>#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the buildavalancheproof RPC"""
from test_framework.avatools import create_coinbase_stakes
from test_framework.key import ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class BuildAvalancheProofTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-enableavalanche=1', '-avacooldown=0']]
def run_test(self):
node = self.nodes[0]
addrkey0 = node.get_deterministic_priv_key()
blockhashes = node.generatetoaddress(2, addrkey0.address)
stakes = create_coinbase_stakes(node, [blockhashes[0]], addrkey0.key)
privkey = ECKey()
privkey.generate()
proof_master = privkey.get_pubkey().get_bytes().hex()
def check_buildavalancheproof_error(
error_code, error_message, stakes):
assert_raises_rpc_error(
error_code,
error_message,
node.buildavalancheproof,
# Sequence
0,
# Expiration
0,
# Master
proof_master,
stakes,
)
good_stake = stakes[0]
self.log.info("Error cases")
negative_vout = good_stake.copy()
negative_vout['vout'] = -1
check_buildavalancheproof_error(-22,
"vout must be positive",
[negative_vout],
)
zero_height = good_stake.copy()
zero_height['height'] = 0
check_buildavalancheproof_error(-22,
"height must be positive",
[zero_height],
)
negative_height = good_stake.copy()
negative_height['height'] = -1
check_buildavalancheproof_error(-22,
"height must be positive",
[negative_height],
)
missing_amount = good_stake.copy()
del missing_amount['amount']
check_buildavalancheproof_error(-8,
"Missing amount",
[missing_amount],
)
invalid_privkey = good_stake.copy()
invalid_privkey['privatekey'] = 'foobar'
check_buildavalancheproof_error(-8,
"Invalid private key",
[invalid_privkey],
)
self.log.info("Happy path")
assert node.buildavalancheproof(0, 0, proof_master, [good_stake])
if __name__ == '__main__':
BuildAvalancheProofTest().main()
|
|
ede3742ad0dd92d28a2c072b6c700b9ebc6484b9
|
game/quests/__init__.py
|
game/quests/__init__.py
|
# -*- coding: utf-8 -*-
"""
Quests
- questcache.wdb
"""
from .. import *
class Quest(Model):
def getTooltip(self):
return QuestTooltip(self)
class QuestTooltip(Tooltip):
def tooltip(self):
self.append("name", self.obj.getName(), color=YELLOW)
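        # Hand back the accumulated tooltip values and reset the buffer for the next render.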
ret = self.values
self.values = []
return ret
class QuestProxy(object):
"""
WDBC proxy for quests
"""
def __init__(self, cls):
from pywow import wdbc
self.__file = wdbc.get("questcache.wdb", build=-1)
def get(self, id):
return self.__file[id]
def getName(self, row):
return row.name
Quest.initProxy(QuestProxy)
|
Add the quest module and basic Quest, QuestTooltip classes
|
game/quests: Add the quest module and basic Quest, QuestTooltip classes
|
Python
|
cc0-1.0
|
jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow
|
game/quests: Add the quest module and basic Quest, QuestTooltip classes
|
# -*- coding: utf-8 -*-
"""
Quests
- questcache.wdb
"""
from .. import *
class Quest(Model):
def getTooltip(self):
return QuestTooltip(self)
class QuestTooltip(Tooltip):
def tooltip(self):
self.append("name", self.obj.getName(), color=YELLOW)
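        # Hand back the accumulated tooltip values and reset the buffer for the next render.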
ret = self.values
self.values = []
return ret
class QuestProxy(object):
"""
WDBC proxy for quests
"""
def __init__(self, cls):
from pywow import wdbc
self.__file = wdbc.get("questcache.wdb", build=-1)
def get(self, id):
return self.__file[id]
def getName(self, row):
return row.name
Quest.initProxy(QuestProxy)
|
<commit_before><commit_msg>game/quests: Add the quest module and basic Quest, QuestTooltip classes<commit_after>
|
# -*- coding: utf-8 -*-
"""
Quests
- questcache.wdb
"""
from .. import *
class Quest(Model):
def getTooltip(self):
return QuestTooltip(self)
class QuestTooltip(Tooltip):
def tooltip(self):
self.append("name", self.obj.getName(), color=YELLOW)
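        # Hand back the accumulated tooltip values and reset the buffer for the next render.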
ret = self.values
self.values = []
return ret
class QuestProxy(object):
"""
WDBC proxy for quests
"""
def __init__(self, cls):
from pywow import wdbc
self.__file = wdbc.get("questcache.wdb", build=-1)
def get(self, id):
return self.__file[id]
def getName(self, row):
return row.name
Quest.initProxy(QuestProxy)
|
game/quests: Add the quest module and basic Quest, QuestTooltip classes# -*- coding: utf-8 -*-
"""
Quests
- questcache.wdb
"""
from .. import *
class Quest(Model):
def getTooltip(self):
return QuestTooltip(self)
class QuestTooltip(Tooltip):
def tooltip(self):
self.append("name", self.obj.getName(), color=YELLOW)
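        # Hand back the accumulated tooltip values and reset the buffer for the next render.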
ret = self.values
self.values = []
return ret
class QuestProxy(object):
"""
WDBC proxy for quests
"""
def __init__(self, cls):
from pywow import wdbc
self.__file = wdbc.get("questcache.wdb", build=-1)
def get(self, id):
return self.__file[id]
def getName(self, row):
return row.name
Quest.initProxy(QuestProxy)
|
<commit_before><commit_msg>game/quests: Add the quest module and basic Quest, QuestTooltip classes<commit_after># -*- coding: utf-8 -*-
"""
Quests
- questcache.wdb
"""
from .. import *
class Quest(Model):
def getTooltip(self):
return QuestTooltip(self)
class QuestTooltip(Tooltip):
def tooltip(self):
self.append("name", self.obj.getName(), color=YELLOW)
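        # Hand back the accumulated tooltip values and reset the buffer for the next render.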
ret = self.values
self.values = []
return ret
class QuestProxy(object):
"""
WDBC proxy for quests
"""
def __init__(self, cls):
from pywow import wdbc
self.__file = wdbc.get("questcache.wdb", build=-1)
def get(self, id):
return self.__file[id]
def getName(self, row):
return row.name
Quest.initProxy(QuestProxy)
|
|
7ceadc54b271d0534229c2f2feed617e97331671
|
tests/test_testlevelsplit_output_task_order.py
|
tests/test_testlevelsplit_output_task_order.py
|
import shutil
import subprocess
import sys
import tempfile
import textwrap
import unittest
from robot.api import ExecutionResult
class PabotTestlevelsplitOutputTaskOrderTest(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _run_tests_with(self, testfile):
robot_file = open("{}/test.robot".format(self.tmpdir), "w")
robot_file.write(textwrap.dedent(testfile))
robot_file.close()
process = subprocess.Popen(
[
sys.executable,
"-m" "pabot.pabot",
"--testlevelsplit",
"{}/test.robot".format(self.tmpdir),
],
cwd=self.tmpdir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
process.wait()
def test_testlevelsplit_output_task_order(self):
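        # Eleven tests ensure that a lexicographic merge ('Test 10' sorting before 'Test 2') would break the exact-order assertion below.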
self._run_tests_with(
"""
*** Test Cases ***
Test 1
Log Executing test
Test 2
Log Executing test
Test 3
Log Executing test
Test 4
Log Executing test
Test 5
Log Executing test
Test 6
Log Executing test
Test 7
Log Executing test
Test 8
Log Executing test
Test 9
Log Executing test
Test 10
Log Executing test
Test 11
Log Executing test
"""
)
result = ExecutionResult("{}/output.xml".format(self.tmpdir))
test_names = [test.name for test in result.suite.tests]
self.assertEqual(
[
"Test 1",
"Test 2",
"Test 3",
"Test 4",
"Test 5",
"Test 6",
"Test 7",
"Test 8",
"Test 9",
"Test 10",
"Test 11",
],
test_names
)
|
Create test for testcase order in output.
|
Create test for testcase order in output.
|
Python
|
apache-2.0
|
mkorpela/pabot,mkorpela/pabot
|
Create test for testcase order in output.
|
import shutil
import subprocess
import sys
import tempfile
import textwrap
import unittest
from robot.api import ExecutionResult
class PabotTestlevelsplitOutputTaskOrderTest(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _run_tests_with(self, testfile):
robot_file = open("{}/test.robot".format(self.tmpdir), "w")
robot_file.write(textwrap.dedent(testfile))
robot_file.close()
process = subprocess.Popen(
[
sys.executable,
"-m" "pabot.pabot",
"--testlevelsplit",
"{}/test.robot".format(self.tmpdir),
],
cwd=self.tmpdir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
process.wait()
def test_testlevelsplit_output_task_order(self):
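        # Eleven tests ensure that a lexicographic merge ('Test 10' sorting before 'Test 2') would break the exact-order assertion below.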
self._run_tests_with(
"""
*** Test Cases ***
Test 1
Log Executing test
Test 2
Log Executing test
Test 3
Log Executing test
Test 4
Log Executing test
Test 5
Log Executing test
Test 6
Log Executing test
Test 7
Log Executing test
Test 8
Log Executing test
Test 9
Log Executing test
Test 10
Log Executing test
Test 11
Log Executing test
"""
)
result = ExecutionResult("{}/output.xml".format(self.tmpdir))
test_names = [test.name for test in result.suite.tests]
self.assertEqual(
[
"Test 1",
"Test 2",
"Test 3",
"Test 4",
"Test 5",
"Test 6",
"Test 7",
"Test 8",
"Test 9",
"Test 10",
"Test 11",
],
test_names
)
|
<commit_before><commit_msg>Create test for testcase order in output.<commit_after>
|
import shutil
import subprocess
import sys
import tempfile
import textwrap
import unittest
from robot.api import ExecutionResult
class PabotTestlevelsplitOutputTaskOrderTest(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _run_tests_with(self, testfile):
robot_file = open("{}/test.robot".format(self.tmpdir), "w")
robot_file.write(textwrap.dedent(testfile))
robot_file.close()
process = subprocess.Popen(
[
sys.executable,
"-m" "pabot.pabot",
"--testlevelsplit",
"{}/test.robot".format(self.tmpdir),
],
cwd=self.tmpdir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
process.wait()
def test_testlevelsplit_output_task_order(self):
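        # Eleven tests ensure that a lexicographic merge ('Test 10' sorting before 'Test 2') would break the exact-order assertion below.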
self._run_tests_with(
"""
*** Test Cases ***
Test 1
Log Executing test
Test 2
Log Executing test
Test 3
Log Executing test
Test 4
Log Executing test
Test 5
Log Executing test
Test 6
Log Executing test
Test 7
Log Executing test
Test 8
Log Executing test
Test 9
Log Executing test
Test 10
Log Executing test
Test 11
Log Executing test
"""
)
result = ExecutionResult("{}/output.xml".format(self.tmpdir))
test_names = [test.name for test in result.suite.tests]
self.assertEqual(
[
"Test 1",
"Test 2",
"Test 3",
"Test 4",
"Test 5",
"Test 6",
"Test 7",
"Test 8",
"Test 9",
"Test 10",
"Test 11",
],
test_names
)
|
Create test for testcase order in output.import shutil
import subprocess
import sys
import tempfile
import textwrap
import unittest
from robot.api import ExecutionResult
class PabotTestlevelsplitOutputTaskOrderTest(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _run_tests_with(self, testfile):
robot_file = open("{}/test.robot".format(self.tmpdir), "w")
robot_file.write(textwrap.dedent(testfile))
robot_file.close()
process = subprocess.Popen(
[
sys.executable,
"-m" "pabot.pabot",
"--testlevelsplit",
"{}/test.robot".format(self.tmpdir),
],
cwd=self.tmpdir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
process.wait()
def test_testlevelsplit_output_task_order(self):
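        # Eleven tests ensure that a lexicographic merge ('Test 10' sorting before 'Test 2') would break the exact-order assertion below.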
self._run_tests_with(
"""
*** Test Cases ***
Test 1
Log Executing test
Test 2
Log Executing test
Test 3
Log Executing test
Test 4
Log Executing test
Test 5
Log Executing test
Test 6
Log Executing test
Test 7
Log Executing test
Test 8
Log Executing test
Test 9
Log Executing test
Test 10
Log Executing test
Test 11
Log Executing test
"""
)
result = ExecutionResult("{}/output.xml".format(self.tmpdir))
test_names = [test.name for test in result.suite.tests]
self.assertEqual(
[
"Test 1",
"Test 2",
"Test 3",
"Test 4",
"Test 5",
"Test 6",
"Test 7",
"Test 8",
"Test 9",
"Test 10",
"Test 11",
],
test_names
)
|
<commit_before><commit_msg>Create test for testcase order in output.<commit_after>import shutil
import subprocess
import sys
import tempfile
import textwrap
import unittest
from robot.api import ExecutionResult
class PabotTestlevelsplitOutputTaskOrderTest(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _run_tests_with(self, testfile):
robot_file = open("{}/test.robot".format(self.tmpdir), "w")
robot_file.write(textwrap.dedent(testfile))
robot_file.close()
process = subprocess.Popen(
[
sys.executable,
"-m" "pabot.pabot",
"--testlevelsplit",
"{}/test.robot".format(self.tmpdir),
],
cwd=self.tmpdir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
process.wait()
def test_testlevelsplit_output_task_order(self):
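        # Eleven tests ensure that a lexicographic merge ('Test 10' sorting before 'Test 2') would break the exact-order assertion below.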
self._run_tests_with(
"""
*** Test Cases ***
Test 1
Log Executing test
Test 2
Log Executing test
Test 3
Log Executing test
Test 4
Log Executing test
Test 5
Log Executing test
Test 6
Log Executing test
Test 7
Log Executing test
Test 8
Log Executing test
Test 9
Log Executing test
Test 10
Log Executing test
Test 11
Log Executing test
"""
)
result = ExecutionResult("{}/output.xml".format(self.tmpdir))
test_names = [test.name for test in result.suite.tests]
self.assertEqual(
[
"Test 1",
"Test 2",
"Test 3",
"Test 4",
"Test 5",
"Test 6",
"Test 7",
"Test 8",
"Test 9",
"Test 10",
"Test 11",
],
test_names
)
|
|
325075d3e6cf85bf8510b6dcc5a29accad5e7bdd
|
tests/bin/test_anchors.py
|
tests/bin/test_anchors.py
|
import keras_retinanet.bin.train
import keras_retinanet.bin.evaluate
from keras_retinanet.bin.train import get_anchors_params
from keras_retinanet.preprocessing.csv_generator import CSVGenerator
from keras_retinanet.utils.anchors import anchors_for_shape
import warnings
def test_csv_generator_anchors():
anchors_dict = get_anchors_params("tests/test-data/anchors.yaml")
train_generator = CSVGenerator(
"tests/test-data/csv/annotations.csv",
"tests/test-data/csv/classes.csv",
transform_generator=None,
batch_size=1,
image_min_side=512,
image_max_side=512,
**anchors_dict
)
inputs,targets = train_generator.next()
    regression_batch,labels_batch = targets
labels = labels_batch[0]
image = inputs[0]
anchors = anchors_for_shape(image.shape,**anchors_dict)
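    # The generator should emit exactly one label entry per anchor generated for this image shape.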
assert len(labels) == len(anchors)
def test_train_generate_anchors_config():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.train.main([
'--epochs=1',
'--steps=1',
'--no-weights',
'--anchors',
'tests/test-data/anchors.yaml',
'--snapshot-path',
'tests/snapshot',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
])
def test_evaluate_config_anchors_params():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.evaluate.main([
'--convert-model',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
'tests/snapshot/resnet50_csv_01.h5'
])
|
Add a script to test the configuration of anchors parameters
|
Add a script to test the configuration of anchors parameters
|
Python
|
apache-2.0
|
delftrobotics/keras-retinanet
|
Add a script to test the configuration of anchors parameters
|
import keras_retinanet.bin.train
import keras_retinanet.bin.evaluate
from keras_retinanet.bin.train import get_anchors_params
from keras_retinanet.preprocessing.csv_generator import CSVGenerator
from keras_retinanet.utils.anchors import anchors_for_shape
import warnings
def test_csv_generator_anchors():
anchors_dict = get_anchors_params("tests/test-data/anchors.yaml")
train_generator = CSVGenerator(
"tests/test-data/csv/annotations.csv",
"tests/test-data/csv/classes.csv",
transform_generator=None,
batch_size=1,
image_min_side=512,
image_max_side=512,
**anchors_dict
)
inputs,targets = train_generator.next()
    regression_batch,labels_batch = targets
labels = labels_batch[0]
image = inputs[0]
anchors = anchors_for_shape(image.shape,**anchors_dict)
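    # The generator should emit exactly one label entry per anchor generated for this image shape.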
assert len(labels) == len(anchors)
def test_train_generate_anchors_config():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.train.main([
'--epochs=1',
'--steps=1',
'--no-weights',
'--anchors',
'tests/test-data/anchors.yaml',
'--snapshot-path',
'tests/snapshot',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
])
def test_evaluate_config_anchors_params():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.evaluate.main([
'--convert-model',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
'tests/snapshot/resnet50_csv_01.h5'
])
|
<commit_before><commit_msg>Add a script to test the configuration of anchors parameters<commit_after>
|
import keras_retinanet.bin.train
import keras_retinanet.bin.evaluate
from keras_retinanet.bin.train import get_anchors_params
from keras_retinanet.preprocessing.csv_generator import CSVGenerator
from keras_retinanet.utils.anchors import anchors_for_shape
import warnings
def test_csv_generator_anchors():
anchors_dict = get_anchors_params("tests/test-data/anchors.yaml")
train_generator = CSVGenerator(
"tests/test-data/csv/annotations.csv",
"tests/test-data/csv/classes.csv",
transform_generator=None,
batch_size=1,
image_min_side=512,
image_max_side=512,
**anchors_dict
)
inputs,targets = train_generator.next()
    regression_batch,labels_batch = targets
labels = labels_batch[0]
image = inputs[0]
anchors = anchors_for_shape(image.shape,**anchors_dict)
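    # The generator should emit exactly one label entry per anchor generated for this image shape.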
assert len(labels) == len(anchors)
def test_train_generate_anchors_config():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.train.main([
'--epochs=1',
'--steps=1',
'--no-weights',
'--anchors',
'tests/test-data/anchors.yaml',
'--snapshot-path',
'tests/snapshot',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
])
def test_evaluate_config_anchors_params():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.evaluate.main([
'--convert-model',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
'tests/snapshot/resnet50_csv_01.h5'
])
|
Add a script to test the configuration of anchors parametersimport keras_retinanet.bin.train
import keras_retinanet.bin.evaluate
from keras_retinanet.bin.train import get_anchors_params
from keras_retinanet.preprocessing.csv_generator import CSVGenerator
from keras_retinanet.utils.anchors import anchors_for_shape
import warnings
def test_csv_generator_anchors():
anchors_dict = get_anchors_params("tests/test-data/anchors.yaml")
train_generator = CSVGenerator(
"tests/test-data/csv/annotations.csv",
"tests/test-data/csv/classes.csv",
transform_generator=None,
batch_size=1,
image_min_side=512,
image_max_side=512,
**anchors_dict
)
inputs,targets = train_generator.next()
    regression_batch,labels_batch = targets
labels = labels_batch[0]
image = inputs[0]
anchors = anchors_for_shape(image.shape,**anchors_dict)
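    # The generator should emit exactly one label entry per anchor generated for this image shape.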
assert len(labels) == len(anchors)
def test_train_generate_anchors_config():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.train.main([
'--epochs=1',
'--steps=1',
'--no-weights',
'--anchors',
'tests/test-data/anchors.yaml',
'--snapshot-path',
'tests/snapshot',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
])
def test_evaluate_config_anchors_params():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.evaluate.main([
'--convert-model',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
'tests/snapshot/resnet50_csv_01.h5'
])
|
<commit_before><commit_msg>Add a script to test the configuration of anchors parameters<commit_after>import keras_retinanet.bin.train
import keras_retinanet.bin.evaluate
from keras_retinanet.bin.train import get_anchors_params
from keras_retinanet.preprocessing.csv_generator import CSVGenerator
from keras_retinanet.utils.anchors import anchors_for_shape
import warnings
def test_csv_generator_anchors():
anchors_dict = get_anchors_params("tests/test-data/anchors.yaml")
train_generator = CSVGenerator(
"tests/test-data/csv/annotations.csv",
"tests/test-data/csv/classes.csv",
transform_generator=None,
batch_size=1,
image_min_side=512,
image_max_side=512,
**anchors_dict
)
inputs,targets = train_generator.next()
    regression_batch,labels_batch = targets
labels = labels_batch[0]
image = inputs[0]
anchors = anchors_for_shape(image.shape,**anchors_dict)
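    # The generator should emit exactly one label entry per anchor generated for this image shape.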
assert len(labels) == len(anchors)
def test_train_generate_anchors_config():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.train.main([
'--epochs=1',
'--steps=1',
'--no-weights',
'--anchors',
'tests/test-data/anchors.yaml',
'--snapshot-path',
'tests/snapshot',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
])
def test_evaluate_config_anchors_params():
# ignore warnings in this test
warnings.simplefilter('ignore')
# run training / evaluation
keras_retinanet.bin.evaluate.main([
'--convert-model',
'csv',
'tests/test-data/csv/annotations.csv',
'tests/test-data/csv/classes.csv',
'tests/snapshot/resnet50_csv_01.h5'
])
|
|
73e3ce5d88adc602f340990cb99e81ab3c9b77d9
|
polyfit_distributions.py
|
polyfit_distributions.py
|
import numpy as np
def main():
np.random.seed(0)
bins = 50
X = np.random.zipf(1.2, 1000)
y = np.histogram(X[X<bins], bins, normed=True)[0]
fn = np.polyfit(np.arange(bins), y, 3)
print(fn)
np.random.seed(0)
bins = 50
samples = 1000
X = [np.random.zipf(1.2, samples),
np.random.zipf(1.3, samples),
np.random.zipf(1.5, samples)]
    y = np.array([np.histogram(x[x<bins], bins, normed=True)[0] for x in X])
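    # With a 2-D y, np.polyfit fits each column independently, returning one set of cubic coefficients per distribution.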
fn = np.polyfit(np.arange(bins), y.T, 3)
print(fn)
if __name__ == '__main__':
main()
|
Build curves for a single zipfian distribution and then 3 combined
|
Build curves for a single zipfian distribution and then 3 combined
|
Python
|
mit
|
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
|
Build curves for a single zipfian distribution and then 3 combined
|
import numpy as np
def main():
np.random.seed(0)
bins = 50
X = np.random.zipf(1.2, 1000)
y = np.histogram(X[X<bins], bins, normed=True)[0]
fn = np.polyfit(np.arange(bins), y, 3)
print(fn)
np.random.seed(0)
bins = 50
samples = 1000
X = [np.random.zipf(1.2, samples),
np.random.zipf(1.3, samples),
np.random.zipf(1.5, samples)]
    y = np.array([np.histogram(x[x<bins], bins, normed=True)[0] for x in X])
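    # With a 2-D y, np.polyfit fits each column independently, returning one set of cubic coefficients per distribution.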
fn = np.polyfit(np.arange(bins), y.T, 3)
print(fn)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Build curves for a single zipfian distribution and then 3 combined<commit_after>
|
import numpy as np
def main():
np.random.seed(0)
bins = 50
X = np.random.zipf(1.2, 1000)
y = np.histogram(X[X<bins], bins, normed=True)[0]
fn = np.polyfit(np.arange(bins), y, 3)
print(fn)
np.random.seed(0)
bins = 50
samples = 1000
X = [np.random.zipf(1.2, samples),
np.random.zipf(1.3, samples),
np.random.zipf(1.5, samples)]
    y = np.array([np.histogram(x[x<bins], bins, normed=True)[0] for x in X])
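    # With a 2-D y, np.polyfit fits each column independently, returning one set of cubic coefficients per distribution.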
fn = np.polyfit(np.arange(bins), y.T, 3)
print(fn)
if __name__ == '__main__':
main()
|
Build curves for a single zipfian distribution and then 3 combinedimport numpy as np
def main():
np.random.seed(0)
bins = 50
X = np.random.zipf(1.2, 1000)
y = np.histogram(X[X<bins], bins, normed=True)[0]
fn = np.polyfit(np.arange(bins), y, 3)
print(fn)
np.random.seed(0)
bins = 50
samples = 1000
X = [np.random.zipf(1.2, samples),
np.random.zipf(1.3, samples),
np.random.zipf(1.5, samples)]
    y = np.array([np.histogram(x[x<bins], bins, normed=True)[0] for x in X])
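    # With a 2-D y, np.polyfit fits each column independently, returning one set of cubic coefficients per distribution.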
fn = np.polyfit(np.arange(bins), y.T, 3)
print(fn)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Build curves for a single zipfian distribution and then 3 combined<commit_after>import numpy as np
def main():
np.random.seed(0)
bins = 50
X = np.random.zipf(1.2, 1000)
y = np.histogram(X[X<bins], bins, normed=True)[0]
fn = np.polyfit(np.arange(bins), y, 3)
print(fn)
np.random.seed(0)
bins = 50
samples = 1000
X = [np.random.zipf(1.2, samples),
np.random.zipf(1.3, samples),
np.random.zipf(1.5, samples)]
y = np.array([np.histogram(x[x<b], bins, normed=True)[0] for x in X])
fn = np.polyfit(np.arange(bins), y.T, 3)
print(fn)
if __name__ == '__main__':
main()
|
|
9fac1940891a18688afbc5383ba030599aa55ad3
|
candidates/management/commands/candidates_remove_last_party_from_versions.py
|
candidates/management/commands/candidates_remove_last_party_from_versions.py
|
import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
needs_update = False
for version in person.get('versions', []):
data = version['data']
if data.get('last_party'):
needs_update = True
msg = "Fixing person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
del data['last_party']
if not needs_update:
continue
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
Add a command to remove bogus 'last_party' from versions
|
Add a command to remove bogus 'last_party' from versions
|
Python
|
agpl-3.0
|
openstate/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,openstate/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,neavouli/yournextrepresentative
|
Add a command to remove bogus 'last_party' from versions
|
import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
needs_update = False
for version in person.get('versions', []):
data = version['data']
if data.get('last_party'):
needs_update = True
msg = "Fixing person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
del data['last_party']
if not needs_update:
continue
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
<commit_before><commit_msg>Add a command to remove bogus 'last_party' from versions<commit_after>
|
import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
needs_update = False
for version in person.get('versions', []):
data = version['data']
if data.get('last_party'):
needs_update = True
msg = "Fixing person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
del data['last_party']
if not needs_update:
continue
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
Add a command to remove bogus 'last_party' from versionsimport sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
needs_update = False
for version in person.get('versions', []):
data = version['data']
if data.get('last_party'):
needs_update = True
msg = "Fixing person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
del data['last_party']
if not needs_update:
continue
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
<commit_before><commit_msg>Add a command to remove bogus 'last_party' from versions<commit_after>import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
needs_update = False
for version in person.get('versions', []):
data = version['data']
if data.get('last_party'):
needs_update = True
msg = "Fixing person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
del data['last_party']
if not needs_update:
continue
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
|
646a6a3acfbe2b2abd5c8360c45041676fdf7b73
|
display.py
|
display.py
|
# -*- coding: utf-8 -*-
"""This is the display module to show a chess board"""
def render_ascii_board(matrix):
end_line_template = ' ---------------------------------\n'
intermediary_line_template = ' |-------------------------------|\n'
file_template = ' a b c d e f g h\n'
rank_template = '%s | %s | %s | %s | %s | %s | %s | %s | %s |\n'
board = end_line_template
for index, rank in enumerate(matrix):
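        # Assuming each rank is an 8-character string, prepending the rank label (8 down to 1) yields the 9 characters for the 9 '%s' slots.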
board += rank_template % tuple(str(8 - index) + rank)
if index < 7:
board += intermediary_line_template
board += end_line_template + file_template
return board
|
Add code to print a chess board in ASCII format.
|
Add code to print a chess board in ASCII format.
|
Python
|
mit
|
gisraptor/fen-parser
|
Add code to print a chess board in ASCII format.
|
# -*- coding: utf-8 -*-
"""This is the display module to show a chess board"""
def render_ascii_board(matrix):
end_line_template = ' ---------------------------------\n'
intermediary_line_template = ' |-------------------------------|\n'
file_template = ' a b c d e f g h\n'
rank_template = '%s | %s | %s | %s | %s | %s | %s | %s | %s |\n'
board = end_line_template
for index, rank in enumerate(matrix):
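        # Assuming each rank is an 8-character string, prepending the rank label (8 down to 1) yields the 9 characters for the 9 '%s' slots.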
board += rank_template % tuple(str(8 - index) + rank)
if index < 7:
board += intermediary_line_template
board += end_line_template + file_template
return board
|
<commit_before><commit_msg>Add code to print a chess board in ASCII format.<commit_after>
|
# -*- coding: utf-8 -*-
"""This is the display module to show a chess board"""
def render_ascii_board(matrix):
end_line_template = ' ---------------------------------\n'
intermediary_line_template = ' |-------------------------------|\n'
file_template = ' a b c d e f g h\n'
rank_template = '%s | %s | %s | %s | %s | %s | %s | %s | %s |\n'
board = end_line_template
for index, rank in enumerate(matrix):
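        # Assuming each rank is an 8-character string, prepending the rank label (8 down to 1) yields the 9 characters for the 9 '%s' slots.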
board += rank_template % tuple(str(8 - index) + rank)
if index < 7:
board += intermediary_line_template
board += end_line_template + file_template
return board
|
Add code to print a chess board in ASCII format.# -*- coding: utf-8 -*-
"""This is the display module to show a chess board"""
def render_ascii_board(matrix):
end_line_template = ' ---------------------------------\n'
intermediary_line_template = ' |-------------------------------|\n'
file_template = ' a b c d e f g h\n'
rank_template = '%s | %s | %s | %s | %s | %s | %s | %s | %s |\n'
board = end_line_template
for index, rank in enumerate(matrix):
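        # Assuming each rank is an 8-character string, prepending the rank label (8 down to 1) yields the 9 characters for the 9 '%s' slots.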
board += rank_template % tuple(str(8 - index) + rank)
if index < 7:
board += intermediary_line_template
board += end_line_template + file_template
return board
|
<commit_before><commit_msg>Add code to print a chess board in ASCII format.<commit_after># -*- coding: utf-8 -*-
"""This is the display module to show a chess board"""
def render_ascii_board(matrix):
end_line_template = ' ---------------------------------\n'
intermediary_line_template = ' |-------------------------------|\n'
file_template = ' a b c d e f g h\n'
rank_template = '%s | %s | %s | %s | %s | %s | %s | %s | %s |\n'
board = end_line_template
for index, rank in enumerate(matrix):
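        # Assuming each rank is an 8-character string, prepending the rank label (8 down to 1) yields the 9 characters for the 9 '%s' slots.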
board += rank_template % tuple(str(8 - index) + rank)
if index < 7:
board += intermediary_line_template
board += end_line_template + file_template
return board
|
|
443ed8e2782faad53f002f74d1aaa032c70915bf
|
tests/test_libudev.py
|
tests/test_libudev.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Sebastian Wiesner <lunaryorn@googlemail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import pytest
from pyudev import _libudev as binding
libudev = binding.libudev
def pytest_generate_tests(metafunc):
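    # Parametrize test_signatures once per wrapped libudev function, via the old-style metafunc.addcall API.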
if 'funcname' in metafunc.funcargnames:
for namespace, members in binding.SIGNATURES.items():
for funcname in members:
full_name = '{0}_{1}'.format(namespace, funcname)
metafunc.addcall(param=(namespace, funcname), id=full_name,
funcargs=dict(funcname=full_name))
def pytest_funcarg__signature(request):
namespace, name = request.param
return binding.SIGNATURES[namespace][name]
def pytest_funcarg__argtypes(request):
argtypes, _ = request.getfuncargvalue('signature')
return argtypes
def pytest_funcarg__restype(request):
_, restype = request.getfuncargvalue('signature')
return restype
def pytest_funcarg__errcheck(request):
funcname = request.getfuncargvalue('funcname')
return binding.ERROR_CHECKERS.get(funcname)
def test_signatures(funcname, restype, argtypes, errcheck):
assert hasattr(libudev, funcname)
func = getattr(libudev, funcname)
assert func.restype == restype
assert func.argtypes == argtypes
assert func.errcheck == errcheck
|
Test for presence and signature of wrapped libudev functions
|
Test for presence and signature of wrapped libudev functions
|
Python
|
lgpl-2.1
|
deepakkapoor624/pyudev,mulkieran/pyudev,deepakkapoor624/pyudev,mulkieran/pyudev,mulkieran/pyudev,pyudev/pyudev
|
Test for presence and signature of wrapped libudev functions
|
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Sebastian Wiesner <lunaryorn@googlemail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import pytest
from pyudev import _libudev as binding
libudev = binding.libudev
def pytest_generate_tests(metafunc):
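    # Parametrize test_signatures once per wrapped libudev function, via the old-style metafunc.addcall API.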
if 'funcname' in metafunc.funcargnames:
for namespace, members in binding.SIGNATURES.items():
for funcname in members:
full_name = '{0}_{1}'.format(namespace, funcname)
metafunc.addcall(param=(namespace, funcname), id=full_name,
funcargs=dict(funcname=full_name))
def pytest_funcarg__signature(request):
namespace, name = request.param
return binding.SIGNATURES[namespace][name]
def pytest_funcarg__argtypes(request):
argtypes, _ = request.getfuncargvalue('signature')
return argtypes
def pytest_funcarg__restype(request):
_, restype = request.getfuncargvalue('signature')
return restype
def pytest_funcarg__errcheck(request):
funcname = request.getfuncargvalue('funcname')
return binding.ERROR_CHECKERS.get(funcname)
def test_signatures(funcname, restype, argtypes, errcheck):
assert hasattr(libudev, funcname)
func = getattr(libudev, funcname)
assert func.restype == restype
assert func.argtypes == argtypes
assert func.errcheck == errcheck
|
<commit_before><commit_msg>Test for presence and signature of wrapped libudev functions<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Sebastian Wiesner <lunaryorn@googlemail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import pytest
from pyudev import _libudev as binding
libudev = binding.libudev
def pytest_generate_tests(metafunc):
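    # Parametrize test_signatures once per wrapped libudev function, via the old-style metafunc.addcall API.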
if 'funcname' in metafunc.funcargnames:
for namespace, members in binding.SIGNATURES.items():
for funcname in members:
full_name = '{0}_{1}'.format(namespace, funcname)
metafunc.addcall(param=(namespace, funcname), id=full_name,
funcargs=dict(funcname=full_name))
def pytest_funcarg__signature(request):
namespace, name = request.param
return binding.SIGNATURES[namespace][name]
def pytest_funcarg__argtypes(request):
argtypes, _ = request.getfuncargvalue('signature')
return argtypes
def pytest_funcarg__restype(request):
_, restype = request.getfuncargvalue('signature')
return restype
def pytest_funcarg__errcheck(request):
funcname = request.getfuncargvalue('funcname')
return binding.ERROR_CHECKERS.get(funcname)
def test_signatures(funcname, restype, argtypes, errcheck):
assert hasattr(libudev, funcname)
func = getattr(libudev, funcname)
assert func.restype == restype
assert func.argtypes == argtypes
assert func.errcheck == errcheck
|
Test for presence and signature of wrapped libudev functions# -*- coding: utf-8 -*-
# Copyright (C) 2011 Sebastian Wiesner <lunaryorn@googlemail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import pytest
from pyudev import _libudev as binding
libudev = binding.libudev
def pytest_generate_tests(metafunc):
if 'funcname' in metafunc.funcargnames:
for namespace, members in binding.SIGNATURES.items():
for funcname in members:
full_name = '{0}_{1}'.format(namespace, funcname)
metafunc.addcall(param=(namespace, funcname), id=full_name,
funcargs=dict(funcname=full_name))
def pytest_funcarg__signature(request):
namespace, name = request.param
return binding.SIGNATURES[namespace][name]
def pytest_funcarg__argtypes(request):
argtypes, _ = request.getfuncargvalue('signature')
return argtypes
def pytest_funcarg__restype(request):
_, restype = request.getfuncargvalue('signature')
return restype
def pytest_funcarg__errcheck(request):
funcname = request.getfuncargvalue('funcname')
return binding.ERROR_CHECKERS.get(funcname)
def test_signatures(funcname, restype, argtypes, errcheck):
assert hasattr(libudev, funcname)
func = getattr(libudev, funcname)
assert func.restype == restype
assert func.argtypes == argtypes
assert func.errcheck == errcheck
|
<commit_before><commit_msg>Test for presence and signature of wrapped libudev functions<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 2011 Sebastian Wiesner <lunaryorn@googlemail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import pytest
from pyudev import _libudev as binding
libudev = binding.libudev
def pytest_generate_tests(metafunc):
if 'funcname' in metafunc.funcargnames:
for namespace, members in binding.SIGNATURES.items():
for funcname in members:
full_name = '{0}_{1}'.format(namespace, funcname)
metafunc.addcall(param=(namespace, funcname), id=full_name,
funcargs=dict(funcname=full_name))
def pytest_funcarg__signature(request):
namespace, name = request.param
return binding.SIGNATURES[namespace][name]
def pytest_funcarg__argtypes(request):
argtypes, _ = request.getfuncargvalue('signature')
return argtypes
def pytest_funcarg__restype(request):
_, restype = request.getfuncargvalue('signature')
return restype
def pytest_funcarg__errcheck(request):
funcname = request.getfuncargvalue('funcname')
return binding.ERROR_CHECKERS.get(funcname)
def test_signatures(funcname, restype, argtypes, errcheck):
assert hasattr(libudev, funcname)
func = getattr(libudev, funcname)
assert func.restype == restype
assert func.argtypes == argtypes
assert func.errcheck == errcheck
|
|
f78d1b549fa4d659ade9fcf03b2d85926eabd29e
|
lambdaCheckDownload.py
|
lambdaCheckDownload.py
|
#!/usr/bin/python
import boto3
import datetime
import dateutil.relativedelta as rd
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
sns = boto3.client('sns')
def keySize(key):
try:
response = s3.head_object(Bucket='ictrp-data', Key=key)
return '{:0.1f}'.format(response['ContentLength'] / (1024.0 * 1024))
except ClientError:
return None
def lambda_handler(event, context):
today = datetime.datetime.today()
weekago = today - rd.relativedelta(days=7)
dataset0 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*weekago.isocalendar())
dataset1 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*today.isocalendar())
msg = '{}: {}\n{}: {}\n'.format(dataset0, keySize(dataset0), dataset1, keySize(dataset1))
sns.publish(TopicArn='arn:aws:sns:eu-central-1:743731058442:ictrp',
Message=msg,
Subject='ICTRP download result')
|
Add a lambda to check on previous downloads
|
Add a lambda to check on previous downloads
|
Python
|
mit
|
gertvv/ictrp-retrieval,gertvv/ictrp-retrieval
|
Add a lambda to check on previous downloads
|
#!/usr/bin/python
import boto3
import datetime
import dateutil.relativedelta as rd
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
sns = boto3.client('sns')
def keySize(key):
try:
response = s3.head_object(Bucket='ictrp-data', Key=key)
return '{:0.1f}'.format(response['ContentLength'] / (1024.0 * 1024))
except ClientError:
return None
def lambda_handler(event, context):
today = datetime.datetime.today()
weekago = today - rd.relativedelta(days=7)
dataset0 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*weekago.isocalendar())
dataset1 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*today.isocalendar())
msg = '{}: {}\n{}: {}\n'.format(dataset0, keySize(dataset0), dataset1, keySize(dataset1))
sns.publish(TopicArn='arn:aws:sns:eu-central-1:743731058442:ictrp',
Message=msg,
Subject='ICTRP download result')
|
<commit_before><commit_msg>Add a lambda to check on previous downloads<commit_after>
|
#!/usr/bin/python
import boto3
import datetime
import dateutil.relativedelta as rd
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
sns = boto3.client('sns')
def keySize(key):
try:
response = s3.head_object(Bucket='ictrp-data', Key=key)
return '{:0.1f}'.format(response['ContentLength'] / (1024.0 * 1024))
except ClientError:
return None
def lambda_handler(event, context):
today = datetime.datetime.today()
weekago = today - rd.relativedelta(days=7)
dataset0 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*weekago.isocalendar())
dataset1 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*today.isocalendar())
msg = '{}: {}\n{}: {}\n'.format(dataset0, keySize(dataset0), dataset1, keySize(dataset1))
sns.publish(TopicArn='arn:aws:sns:eu-central-1:743731058442:ictrp',
Message=msg,
Subject='ICTRP download result')
|
Add a lambda to check on previous downloads#!/usr/bin/python
import boto3
import datetime
import dateutil.relativedelta as rd
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
sns = boto3.client('sns')
def keySize(key):
try:
response = s3.head_object(Bucket='ictrp-data', Key=key)
return '{:0.1f}'.format(response['ContentLength'] / (1024.0 * 1024))
except ClientError:
return None
def lambda_handler(event, context):
today = datetime.datetime.today()
weekago = today - rd.relativedelta(days=7)
dataset0 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*weekago.isocalendar())
dataset1 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*today.isocalendar())
msg = '{}: {}\n{}: {}\n'.format(dataset0, keySize(dataset0), dataset1, keySize(dataset1))
sns.publish(TopicArn='arn:aws:sns:eu-central-1:743731058442:ictrp',
Message=msg,
Subject='ICTRP download result')
|
<commit_before><commit_msg>Add a lambda to check on previous downloads<commit_after>#!/usr/bin/python
import boto3
import datetime
import dateutil.relativedelta as rd
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
sns = boto3.client('sns')
def keySize(key):
try:
response = s3.head_object(Bucket='ictrp-data', Key=key)
return '{:0.1f}'.format(response['ContentLength'] / (1024.0 * 1024))
except ClientError:
return None
def lambda_handler(event, context):
today = datetime.datetime.today()
weekago = today - rd.relativedelta(days=7)
dataset0 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*weekago.isocalendar())
dataset1 = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*today.isocalendar())
msg = '{}: {}\n{}: {}\n'.format(dataset0, keySize(dataset0), dataset1, keySize(dataset1))
sns.publish(TopicArn='arn:aws:sns:eu-central-1:743731058442:ictrp',
Message=msg,
Subject='ICTRP download result')
|
|
1026256a525a73a404fe195e0a103a007da9ceeb
|
test/unit/test_bark.py
|
test/unit/test_bark.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
import mock
import bark
def test_default_configure():
'''Test configure helper with no arguments.'''
configurators = {'classic': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure()
assert configurators['classic'].called
def test_custom_configure():
'''Test configure helper with specific configurator.'''
configurators = {'other': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure(configurator='other')
assert configurators['other'].called
def test_configure_with_missing_configurator():
    '''Test configure raises ValueError for missing configurator.'''
with mock.patch.dict(bark.configurators, clear=True):
with pytest.raises(ValueError):
bark.configure(configurator='other')
|
Add unit tests for configure helper.
|
Add unit tests for configure helper.
|
Python
|
apache-2.0
|
4degrees/mill,4degrees/sawmill
|
Add unit tests for configure helper.
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
import mock
import bark
def test_default_configure():
'''Test configure helper with no arguments.'''
configurators = {'classic': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure()
assert configurators['classic'].called
def test_custom_configure():
'''Test configure helper with specific configurator.'''
configurators = {'other': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure(configurator='other')
assert configurators['other'].called
def test_configure_with_missing_configurator():
    '''Test configure raises ValueError for missing configurator.'''
with mock.patch.dict(bark.configurators, clear=True):
with pytest.raises(ValueError):
bark.configure(configurator='other')
|
<commit_before><commit_msg>Add unit tests for configure helper.<commit_after>
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
import mock
import bark
def test_default_configure():
'''Test configure helper with no arguments.'''
configurators = {'classic': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure()
assert configurators['classic'].called
def test_custom_configure():
'''Test configure helper with specific configurator.'''
configurators = {'other': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure(configurator='other')
assert configurators['other'].called
def test_configure_with_missing_configurator():
    '''Test configure raises ValueError for missing configurator.'''
with mock.patch.dict(bark.configurators, clear=True):
with pytest.raises(ValueError):
bark.configure(configurator='other')
|
Add unit tests for configure helper.# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
import mock
import bark
def test_default_configure():
'''Test configure helper with no arguments.'''
configurators = {'classic': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure()
assert configurators['classic'].called
def test_custom_configure():
'''Test configure helper with specific configurator.'''
configurators = {'other': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure(configurator='other')
assert configurators['other'].called
def test_configure_with_missing_configurator():
    '''Test configure raises ValueError for missing configurator.'''
with mock.patch.dict(bark.configurators, clear=True):
with pytest.raises(ValueError):
bark.configure(configurator='other')
|
<commit_before><commit_msg>Add unit tests for configure helper.<commit_after># :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
import mock
import bark
def test_default_configure():
'''Test configure helper with no arguments.'''
configurators = {'classic': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure()
assert configurators['classic'].called
def test_custom_configure():
'''Test configure helper with specific configurator.'''
configurators = {'other': mock.Mock()}
with mock.patch.dict(
bark.configurators, configurators, clear=True
):
bark.configure(configurator='other')
assert configurators['other'].called
def test_configure_with_missing_configurator():
    '''Test configure raises ValueError for missing configurator.'''
with mock.patch.dict(bark.configurators, clear=True):
with pytest.raises(ValueError):
bark.configure(configurator='other')
|
|
4ad1576aa97fc83d344c50ac4695019ce2e43f61
|
hordak/migrations/0010_auto_20161216_1202.py
|
hordak/migrations/0010_auto_20161216_1202.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-16 18:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hordak', '0009_bank_accounts_are_asset_accounts'),
]
operations = [
migrations.AlterField(
model_name='account',
name='_type',
field=models.CharField(blank=True, choices=[('AS', 'Asset'), ('LI', 'Liability'), ('IN', 'Income'), ('EX', 'Expense'), ('EQ', 'Equity'), ('TR', 'Currency Trading')], max_length=2),
),
migrations.AlterField(
model_name='account',
name='is_bank_account',
            field=models.BooleanField(default=False, help_text='Is this a bank account? This implies we can import bank statements into it and that it only supports a single currency'),
),
]
|
Add missing migration for account model
|
Add missing migration for account model
|
Python
|
mit
|
adamcharnock/django-hordak,adamcharnock/django-hordak,waldocollective/django-hordak
|
Add missing migration for account model
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-16 18:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hordak', '0009_bank_accounts_are_asset_accounts'),
]
operations = [
migrations.AlterField(
model_name='account',
name='_type',
field=models.CharField(blank=True, choices=[('AS', 'Asset'), ('LI', 'Liability'), ('IN', 'Income'), ('EX', 'Expense'), ('EQ', 'Equity'), ('TR', 'Currency Trading')], max_length=2),
),
migrations.AlterField(
model_name='account',
name='is_bank_account',
            field=models.BooleanField(default=False, help_text='Is this a bank account? This implies we can import bank statements into it and that it only supports a single currency'),
),
]
|
<commit_before><commit_msg>Add missing migration for account model<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-16 18:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hordak', '0009_bank_accounts_are_asset_accounts'),
]
operations = [
migrations.AlterField(
model_name='account',
name='_type',
field=models.CharField(blank=True, choices=[('AS', 'Asset'), ('LI', 'Liability'), ('IN', 'Income'), ('EX', 'Expense'), ('EQ', 'Equity'), ('TR', 'Currency Trading')], max_length=2),
),
migrations.AlterField(
model_name='account',
name='is_bank_account',
            field=models.BooleanField(default=False, help_text='Is this a bank account? This implies we can import bank statements into it and that it only supports a single currency'),
),
]
|
Add missing migration for account model# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-16 18:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hordak', '0009_bank_accounts_are_asset_accounts'),
]
operations = [
migrations.AlterField(
model_name='account',
name='_type',
field=models.CharField(blank=True, choices=[('AS', 'Asset'), ('LI', 'Liability'), ('IN', 'Income'), ('EX', 'Expense'), ('EQ', 'Equity'), ('TR', 'Currency Trading')], max_length=2),
),
migrations.AlterField(
model_name='account',
name='is_bank_account',
            field=models.BooleanField(default=False, help_text='Is this a bank account? This implies we can import bank statements into it and that it only supports a single currency'),
),
]
|
<commit_before><commit_msg>Add missing migration for account model<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-16 18:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hordak', '0009_bank_accounts_are_asset_accounts'),
]
operations = [
migrations.AlterField(
model_name='account',
name='_type',
field=models.CharField(blank=True, choices=[('AS', 'Asset'), ('LI', 'Liability'), ('IN', 'Income'), ('EX', 'Expense'), ('EQ', 'Equity'), ('TR', 'Currency Trading')], max_length=2),
),
migrations.AlterField(
model_name='account',
name='is_bank_account',
            field=models.BooleanField(default=False, help_text='Is this a bank account? This implies we can import bank statements into it and that it only supports a single currency'),
),
]
|
|
c2e861adbb0174747e5ec8a75239f015ff67c244
|
ideascale/migrations/0010_auto_20150513_1146.py
|
ideascale/migrations/0010_auto_20150513_1146.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ideascale', '0009_auto_20150430_0006'),
]
operations = [
migrations.RenameModel(
old_name='TestingParameters',
new_name='TestingParameter',
),
]
|
Modify ideascale connect app data model
|
Modify ideascale connect app data model
|
Python
|
mit
|
joausaga/social-ideation,rebearteta/social-ideation,rebearteta/social-ideation,rebearteta/social-ideation,joausaga/social-ideation,rebearteta/social-ideation,joausaga/social-ideation,joausaga/social-ideation
|
Modify ideascale connect app data model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ideascale', '0009_auto_20150430_0006'),
]
operations = [
migrations.RenameModel(
old_name='TestingParameters',
new_name='TestingParameter',
),
]
|
<commit_before><commit_msg>Modify ideascale connect app data model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ideascale', '0009_auto_20150430_0006'),
]
operations = [
migrations.RenameModel(
old_name='TestingParameters',
new_name='TestingParameter',
),
]
|
Modify ideascale connect app data model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ideascale', '0009_auto_20150430_0006'),
]
operations = [
migrations.RenameModel(
old_name='TestingParameters',
new_name='TestingParameter',
),
]
|
<commit_before><commit_msg>Modify ideascale connect app data model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ideascale', '0009_auto_20150430_0006'),
]
operations = [
migrations.RenameModel(
old_name='TestingParameters',
new_name='TestingParameter',
),
]
|
|
43ed3eeacf353404acf77c106aacb143f1c2c2ea
|
tests/instancing_tests/NonterminalsTest.py
|
tests/instancing_tests/NonterminalsTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
|
Add tests of instancing nonterminals
|
Add tests of instancing nonterminals
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add tests of instancing nonterminals
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tests of instancing nonterminals<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
|
Add tests of instancing nonterminals#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tests of instancing nonterminals<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
|
|
821fc882ec14d6af626a69d0ac70c3428d39d812
|
tests/integration/container_create_test.py
|
tests/integration/container_create_test.py
|
import pytest
import contextlib
import os
import re
from xd.docker.client import *
from xd.docker.container import *
from xd.docker.parameters import *
@pytest.fixture
def docker_with_busybox(docker):
docker.image_pull("busybox")
return docker
def test_container_create_1(docker_with_busybox, stdout):
with stdout.redirect():
container = docker_with_busybox.container_create(
ContainerConfig("busybox"),
"xd-docker-container-create-1")
assert container is not None
assert isinstance(container, Container)
assert re.match('^[0-9a-f]+$', container.id)
|
Add simple container_create() integration test
|
client: Add simple container_create() integration test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk>
|
Python
|
mit
|
XD-embedded/xd-docker,XD-embedded/xd-docker,esben/xd-docker,esben/xd-docker
|
client: Add simple container_create() integration test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk>
|
import pytest
import contextlib
import os
import re
from xd.docker.client import *
from xd.docker.container import *
from xd.docker.parameters import *
@pytest.fixture
def docker_with_busybox(docker):
docker.image_pull("busybox")
return docker
def test_container_create_1(docker_with_busybox, stdout):
with stdout.redirect():
container = docker_with_busybox.container_create(
ContainerConfig("busybox"),
"xd-docker-container-create-1")
assert container is not None
assert isinstance(container, Container)
assert re.match('^[0-9a-f]+$', container.id)
|
<commit_before><commit_msg>client: Add simple container_create() integration test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk><commit_after>
|
import pytest
import contextlib
import os
import re
from xd.docker.client import *
from xd.docker.container import *
from xd.docker.parameters import *
@pytest.fixture
def docker_with_busybox(docker):
docker.image_pull("busybox")
return docker
def test_container_create_1(docker_with_busybox, stdout):
with stdout.redirect():
container = docker_with_busybox.container_create(
ContainerConfig("busybox"),
"xd-docker-container-create-1")
assert container is not None
assert isinstance(container, Container)
assert re.match('^[0-9a-f]+$', container.id)
|
client: Add simple container_create() integration test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk>import pytest
import contextlib
import os
import re
from xd.docker.client import *
from xd.docker.container import *
from xd.docker.parameters import *
@pytest.fixture
def docker_with_busybox(docker):
docker.image_pull("busybox")
return docker
def test_container_create_1(docker_with_busybox, stdout):
with stdout.redirect():
container = docker_with_busybox.container_create(
ContainerConfig("busybox"),
"xd-docker-container-create-1")
assert container is not None
assert isinstance(container, Container)
assert re.match('^[0-9a-f]+$', container.id)
|
<commit_before><commit_msg>client: Add simple container_create() integration test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk><commit_after>import pytest
import contextlib
import os
import re
from xd.docker.client import *
from xd.docker.container import *
from xd.docker.parameters import *
@pytest.fixture
def docker_with_busybox(docker):
docker.image_pull("busybox")
return docker
def test_container_create_1(docker_with_busybox, stdout):
with stdout.redirect():
container = docker_with_busybox.container_create(
ContainerConfig("busybox"),
"xd-docker-container-create-1")
assert container is not None
assert isinstance(container, Container)
assert re.match('^[0-9a-f]+$', container.id)
|
|
7dc8c40356733042c1c30ad180afb8605aab871d
|
scripts/submit_build.py
|
scripts/submit_build.py
|
#!/usr/bin/python
"""
Notifies HQ of a new CommCare version. If artifacts are present, the build is
considered a J2ME build.
Requires an ApiUser (corehq.apps.api.models.ApiUser) on the remote_host with
username/password given
"""
import os
import shlex
import subprocess
from subprocess import PIPE
import sys
def submit_build(environ, host):
target_url = host + "/builds/post/"
if "ARTIFACTS" in environ:
raw_command = ('curl -v -H "Expect:" -F "artifacts=@{ARTIFACTS}" ' +
'-F "username={USERNAME}" -F "password={PASSWORD}" ' +
'-F "build_number={BUILD_NUMBER}" ' +
'-F "version={VERSION}" {target_url}')
else:
raw_command = ('curl -v -H "Expect:" -F "username={USERNAME}" ' +
'-F "password={PASSWORD}" ' +
'-F "version={VERSION}" {target_url}')
command = raw_command.format(target_url=target_url, **environ)
p = subprocess.Popen(shlex.split(command),
stdout=PIPE, stderr=None, shell=False)
return command, p.stdout.read(), ""
if __name__ == "__main__":
variables = ["USERNAME",
"PASSWORD",
"ARTIFACTS",
"REMOTE_HOST",
"VERSION",
"BUILD_NUMBER"]
args = sys.argv[1:]
environ = None
try:
environ = dict([(var, os.environ[var]) for var in variables])
except KeyError:
if len(args) == len(variables):
environ = dict(zip(variables, args))
if environ:
hosts = environ['REMOTE_HOST'].split("+")
for host in hosts:
command, out, err = submit_build(environ, host)
print command
if out.strip():
print "--------STDOUT--------"
print out
if err.strip():
print "--------STDERR--------"
print err
else:
print("submit_build.py <%s>" % ("> <".join(variables)))
|
Copy build post script from commcare-j2me
|
Copy build post script from commcare-j2me
|
Python
|
apache-2.0
|
dimagi/commcare-core,dimagi/commcare-core,dimagi/commcare,dimagi/commcare-core,dimagi/commcare,dimagi/commcare
|
Copy build post script from commcare-j2me
|
#!/usr/bin/python
"""
Notifies HQ of a new CommCare version. If artifacts are present, the build is
considered a J2ME build.
Requires an ApiUser (corehq.apps.api.models.ApiUser) on the remote_host with
username/password given
"""
import os
import shlex
import subprocess
from subprocess import PIPE
import sys
def submit_build(environ, host):
target_url = host + "/builds/post/"
if "ARTIFACTS" in environ:
raw_command = ('curl -v -H "Expect:" -F "artifacts=@{ARTIFACTS}" ' +
'-F "username={USERNAME}" -F "password={PASSWORD}" ' +
'-F "build_number={BUILD_NUMBER}" ' +
'-F "version={VERSION}" {target_url}')
else:
raw_command = ('curl -v -H "Expect:" -F "username={USERNAME}" ' +
'-F "password={PASSWORD}" ' +
'-F "version={VERSION}" {target_url}')
command = raw_command.format(target_url=target_url, **environ)
p = subprocess.Popen(shlex.split(command),
stdout=PIPE, stderr=None, shell=False)
return command, p.stdout.read(), ""
if __name__ == "__main__":
variables = ["USERNAME",
"PASSWORD",
"ARTIFACTS",
"REMOTE_HOST",
"VERSION",
"BUILD_NUMBER"]
args = sys.argv[1:]
environ = None
try:
environ = dict([(var, os.environ[var]) for var in variables])
except KeyError:
if len(args) == len(variables):
environ = dict(zip(variables, args))
if environ:
hosts = environ['REMOTE_HOST'].split("+")
for host in hosts:
command, out, err = submit_build(environ, host)
print command
if out.strip():
print "--------STDOUT--------"
print out
if err.strip():
print "--------STDERR--------"
print err
else:
print("submit_build.py <%s>" % ("> <".join(variables)))
|
<commit_before><commit_msg>Copy build post script from commcare-j2me<commit_after>
|
#!/usr/bin/python
"""
Notifies HQ of a new CommCare version. If artifacts are present, the build is
considered a J2ME build.
Requires an ApiUser (corehq.apps.api.models.ApiUser) on the remote_host with
username/password given
"""
import os
import shlex
import subprocess
from subprocess import PIPE
import sys
def submit_build(environ, host):
target_url = host + "/builds/post/"
if "ARTIFACTS" in environ:
raw_command = ('curl -v -H "Expect:" -F "artifacts=@{ARTIFACTS}" ' +
'-F "username={USERNAME}" -F "password={PASSWORD}" ' +
'-F "build_number={BUILD_NUMBER}" ' +
'-F "version={VERSION}" {target_url}')
else:
raw_command = ('curl -v -H "Expect:" -F "username={USERNAME}" ' +
'-F "password={PASSWORD}" ' +
'-F "version={VERSION}" {target_url}')
command = raw_command.format(target_url=target_url, **environ)
p = subprocess.Popen(shlex.split(command),
stdout=PIPE, stderr=None, shell=False)
return command, p.stdout.read(), ""
if __name__ == "__main__":
variables = ["USERNAME",
"PASSWORD",
"ARTIFACTS",
"REMOTE_HOST",
"VERSION",
"BUILD_NUMBER"]
args = sys.argv[1:]
environ = None
try:
environ = dict([(var, os.environ[var]) for var in variables])
except KeyError:
if len(args) == len(variables):
environ = dict(zip(variables, args))
if environ:
hosts = environ['REMOTE_HOST'].split("+")
for host in hosts:
command, out, err = submit_build(environ, host)
print command
if out.strip():
print "--------STDOUT--------"
print out
if err.strip():
print "--------STDERR--------"
print err
else:
print("submit_build.py <%s>" % ("> <".join(variables)))
|
Copy build post script from commcare-j2me#!/usr/bin/python
"""
Notifies HQ of a new CommCare version. If artifacts are present, the build is
considered a J2ME build.
Requires an ApiUser (corehq.apps.api.models.ApiUser) on the remote_host with
username/password given
"""
import os
import shlex
import subprocess
from subprocess import PIPE
import sys
def submit_build(environ, host):
target_url = host + "/builds/post/"
if "ARTIFACTS" in environ:
raw_command = ('curl -v -H "Expect:" -F "artifacts=@{ARTIFACTS}" ' +
'-F "username={USERNAME}" -F "password={PASSWORD}" ' +
'-F "build_number={BUILD_NUMBER}" ' +
'-F "version={VERSION}" {target_url}')
else:
raw_command = ('curl -v -H "Expect:" -F "username={USERNAME}" ' +
'-F "password={PASSWORD}" ' +
'-F "version={VERSION}" {target_url}')
command = raw_command.format(target_url=target_url, **environ)
p = subprocess.Popen(shlex.split(command),
stdout=PIPE, stderr=None, shell=False)
return command, p.stdout.read(), ""
if __name__ == "__main__":
variables = ["USERNAME",
"PASSWORD",
"ARTIFACTS",
"REMOTE_HOST",
"VERSION",
"BUILD_NUMBER"]
args = sys.argv[1:]
environ = None
try:
environ = dict([(var, os.environ[var]) for var in variables])
except KeyError:
if len(args) == len(variables):
environ = dict(zip(variables, args))
if environ:
hosts = environ['REMOTE_HOST'].split("+")
for host in hosts:
command, out, err = submit_build(environ, host)
print command
if out.strip():
print "--------STDOUT--------"
print out
if err.strip():
print "--------STDERR--------"
print err
else:
print("submit_build.py <%s>" % ("> <".join(variables)))
|
<commit_before><commit_msg>Copy build post script from commcare-j2me<commit_after>#!/usr/bin/python
"""
Notifies HQ of a new CommCare version. If artifacts are present, the build is
considered a J2ME build.
Requires an ApiUser (corehq.apps.api.models.ApiUser) on the remote_host with
username/password given
"""
import os
import shlex
import subprocess
from subprocess import PIPE
import sys
def submit_build(environ, host):
target_url = host + "/builds/post/"
if "ARTIFACTS" in environ:
raw_command = ('curl -v -H "Expect:" -F "artifacts=@{ARTIFACTS}" ' +
'-F "username={USERNAME}" -F "password={PASSWORD}" ' +
'-F "build_number={BUILD_NUMBER}" ' +
'-F "version={VERSION}" {target_url}')
else:
raw_command = ('curl -v -H "Expect:" -F "username={USERNAME}" ' +
'-F "password={PASSWORD}" ' +
'-F "version={VERSION}" {target_url}')
command = raw_command.format(target_url=target_url, **environ)
p = subprocess.Popen(shlex.split(command),
stdout=PIPE, stderr=None, shell=False)
return command, p.stdout.read(), ""
if __name__ == "__main__":
variables = ["USERNAME",
"PASSWORD",
"ARTIFACTS",
"REMOTE_HOST",
"VERSION",
"BUILD_NUMBER"]
args = sys.argv[1:]
environ = None
try:
environ = dict([(var, os.environ[var]) for var in variables])
except KeyError:
if len(args) == len(variables):
environ = dict(zip(variables, args))
if environ:
hosts = environ['REMOTE_HOST'].split("+")
for host in hosts:
command, out, err = submit_build(environ, host)
print command
if out.strip():
print "--------STDOUT--------"
print out
if err.strip():
print "--------STDERR--------"
print err
else:
print("submit_build.py <%s>" % ("> <".join(variables)))
|
|
d6f04cb1b9383122e673a4d8d314c2fdc0191608
|
AndroidGatewayPlugin/Testdriver/Consumer.py
|
AndroidGatewayPlugin/Testdriver/Consumer.py
|
import AndroidConnector
import uuid
import sys
import time
import datetime
import AmmoMessages_pb2
from twisted.internet import reactor
def onDataReceived(connector, msg):
messageType = AmmoMessages_pb2._MESSAGEWRAPPER_MESSAGETYPE.values_by_number[msg.type].name
print messageType, datetime.datetime.now(),
if msg.type == AmmoMessages_pb2.MessageWrapper.DATA_MESSAGE:
print "::", msg.data_message.data
else:
print ""
if __name__ == "__main__":
print "Android Gateway Tester"
if len(sys.argv) != 3:
print "Usage: ", sys.argv[0], "host port"
exit(-1)
deviceName = "device:test/" + uuid.uuid1().hex
userName = "user:test/" + uuid.uuid1().hex
connector = AndroidConnector.AndroidConnector(sys.argv[1], int(sys.argv[2]), deviceName, userName, "")
connector.setMessageQueueEnabled(False)
connector.registerMessageCallback(onDataReceived)
try:
connector.start()
connector.waitForAuthentication()
print "Subscribing."
connector.subscribe("application/vnd.edu.vu.isis.ammo.test.TestData")
sequenceNumber = 0
while True:
time.sleep(0.5)
except KeyboardInterrupt:
print "Got ^C... Closing"
reactor.callFromThread(reactor.stop)
except:
print "Unexpected error... dying."
reactor.callFromThread(reactor.stop)
raise
|
Add consumer testdriver (subscribes to a topic and prints what it gets)
|
Add consumer testdriver (subscribes to a topic and prints what it gets)
|
Python
|
mit
|
isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway
|
Add consumer testdriver (subscribes to a topic and prints what it gets)
|
import AndroidConnector
import uuid
import sys
import time
import datetime
import AmmoMessages_pb2
from twisted.internet import reactor
def onDataReceived(connector, msg):
messageType = AmmoMessages_pb2._MESSAGEWRAPPER_MESSAGETYPE.values_by_number[msg.type].name
print messageType, datetime.datetime.now(),
if msg.type == AmmoMessages_pb2.MessageWrapper.DATA_MESSAGE:
print "::", msg.data_message.data
else:
print ""
if __name__ == "__main__":
print "Android Gateway Tester"
if len(sys.argv) != 3:
print "Usage: ", sys.argv[0], "host port"
exit(-1)
deviceName = "device:test/" + uuid.uuid1().hex
userName = "user:test/" + uuid.uuid1().hex
connector = AndroidConnector.AndroidConnector(sys.argv[1], int(sys.argv[2]), deviceName, userName, "")
connector.setMessageQueueEnabled(False)
connector.registerMessageCallback(onDataReceived)
try:
connector.start()
connector.waitForAuthentication()
print "Subscribing."
connector.subscribe("application/vnd.edu.vu.isis.ammo.test.TestData")
sequenceNumber = 0
while True:
time.sleep(0.5)
except KeyboardInterrupt:
print "Got ^C... Closing"
reactor.callFromThread(reactor.stop)
except:
print "Unexpected error... dying."
reactor.callFromThread(reactor.stop)
raise
|
<commit_before><commit_msg>Add consumer testdriver (subscribes to a topic and prints what it gets)<commit_after>
|
import AndroidConnector
import uuid
import sys
import time
import datetime
import AmmoMessages_pb2
from twisted.internet import reactor
def onDataReceived(connector, msg):
messageType = AmmoMessages_pb2._MESSAGEWRAPPER_MESSAGETYPE.values_by_number[msg.type].name
print messageType, datetime.datetime.now(),
if msg.type == AmmoMessages_pb2.MessageWrapper.DATA_MESSAGE:
print "::", msg.data_message.data
else:
print ""
if __name__ == "__main__":
print "Android Gateway Tester"
if len(sys.argv) != 3:
print "Usage: ", sys.argv[0], "host port"
exit(-1)
deviceName = "device:test/" + uuid.uuid1().hex
userName = "user:test/" + uuid.uuid1().hex
connector = AndroidConnector.AndroidConnector(sys.argv[1], int(sys.argv[2]), deviceName, userName, "")
connector.setMessageQueueEnabled(False)
connector.registerMessageCallback(onDataReceived)
try:
connector.start()
connector.waitForAuthentication()
print "Subscribing."
connector.subscribe("application/vnd.edu.vu.isis.ammo.test.TestData")
sequenceNumber = 0
while True:
time.sleep(0.5)
except KeyboardInterrupt:
print "Got ^C... Closing"
reactor.callFromThread(reactor.stop)
except:
print "Unexpected error... dying."
reactor.callFromThread(reactor.stop)
raise
|
Add consumer testdriver (subscribes to a topic and prints what it gets)import AndroidConnector
import uuid
import sys
import time
import datetime
import AmmoMessages_pb2
from twisted.internet import reactor
def onDataReceived(connector, msg):
messageType = AmmoMessages_pb2._MESSAGEWRAPPER_MESSAGETYPE.values_by_number[msg.type].name
print messageType, datetime.datetime.now(),
if msg.type == AmmoMessages_pb2.MessageWrapper.DATA_MESSAGE:
print "::", msg.data_message.data
else:
print ""
if __name__ == "__main__":
print "Android Gateway Tester"
if len(sys.argv) != 3:
print "Usage: ", sys.argv[0], "host port"
exit(-1)
deviceName = "device:test/" + uuid.uuid1().hex
userName = "user:test/" + uuid.uuid1().hex
connector = AndroidConnector.AndroidConnector(sys.argv[1], int(sys.argv[2]), deviceName, userName, "")
connector.setMessageQueueEnabled(False)
connector.registerMessageCallback(onDataReceived)
try:
connector.start()
connector.waitForAuthentication()
print "Subscribing."
connector.subscribe("application/vnd.edu.vu.isis.ammo.test.TestData")
sequenceNumber = 0
while True:
time.sleep(0.5)
except KeyboardInterrupt:
print "Got ^C... Closing"
reactor.callFromThread(reactor.stop)
except:
print "Unexpected error... dying."
reactor.callFromThread(reactor.stop)
raise
|
<commit_before><commit_msg>Add consumer testdriver (subscribes to a topic and prints what it gets)<commit_after>import AndroidConnector
import uuid
import sys
import time
import datetime
import AmmoMessages_pb2
from twisted.internet import reactor
def onDataReceived(connector, msg):
messageType = AmmoMessages_pb2._MESSAGEWRAPPER_MESSAGETYPE.values_by_number[msg.type].name
print messageType, datetime.datetime.now(),
if msg.type == AmmoMessages_pb2.MessageWrapper.DATA_MESSAGE:
print "::", msg.data_message.data
else:
print ""
if __name__ == "__main__":
print "Android Gateway Tester"
if len(sys.argv) != 3:
print "Usage: ", sys.argv[0], "host port"
exit(-1)
deviceName = "device:test/" + uuid.uuid1().hex
userName = "user:test/" + uuid.uuid1().hex
connector = AndroidConnector.AndroidConnector(sys.argv[1], int(sys.argv[2]), deviceName, userName, "")
connector.setMessageQueueEnabled(False)
connector.registerMessageCallback(onDataReceived)
try:
connector.start()
connector.waitForAuthentication()
print "Subscribing."
connector.subscribe("application/vnd.edu.vu.isis.ammo.test.TestData")
sequenceNumber = 0
while True:
time.sleep(0.5)
except KeyboardInterrupt:
print "Got ^C... Closing"
reactor.callFromThread(reactor.stop)
except:
print "Unexpected error... dying."
reactor.callFromThread(reactor.stop)
raise
|
|
5e60a3ffb8d008e1bc9ed0045ef2a927f8854598
|
tool/corpus_tools/download_pub_packages.py
|
tool/corpus_tools/download_pub_packages.py
|
# Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import re
import shutil
import subprocess
import requests
PACKAGE_URL = re.compile(r'http://pub.dartlang.org/packages/(\w+).json')
PUBSPEC = '''
name: %s
dependencies:
%s: %s
'''
ARCHIVE_URL = "https://commondatastorage.googleapis.com/pub.dartlang.org/packages/{}-{}.tar.gz"
package_urls = []
# Download the full list of package names.
print 'Downloading package lists:'
url = 'http://pub.dartlang.org/packages.json'
while True:
print '-', url
data = requests.get(url).json()
for package in data['packages']:
package_urls.append(package)
url = data['next']
if not url: break
print
print 'Found', len(package_urls), 'packages'
if os.path.exists('out'):
shutil.rmtree('out')
os.mkdir('out')
# Download the archive of the most recent version of each package.
for package_url in package_urls:
data = requests.get(package_url).json()
name = data['name']
version = data['versions'][-1]
print name, version
# Download the archive.
archive_url = ARCHIVE_URL.format(name, version)
tar_file = 'out/{}-{}.tar'.format(name, version)
with open(tar_file, 'wb') as file:
file.write(requests.get(archive_url).content)
# Extract it.
extract_dir = 'out/{}-{}'.format(name, version)
os.mkdir(extract_dir)
subprocess.call(['tar', '-xf', tar_file, '-C', extract_dir])
|
Add tool for downloading pub packages
|
Add tool for downloading pub packages
|
Python
|
bsd-3-clause
|
dart-archive/smart,dart-archive/smart,dart-archive/smart
|
Add tool for downloading pub packages
|
# Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import re
import shutil
import subprocess
import requests
PACKAGE_URL = re.compile(r'http://pub.dartlang.org/packages/(\w+).json')
PUBSPEC = '''
name: %s
dependencies:
%s: %s
'''
ARCHIVE_URL = "https://commondatastorage.googleapis.com/pub.dartlang.org/packages/{}-{}.tar.gz"
package_urls = []
# Download the full list of package names.
print 'Downloading package lists:'
url = 'http://pub.dartlang.org/packages.json'
while True:
print '-', url
data = requests.get(url).json()
for package in data['packages']:
package_urls.append(package)
url = data['next']
if not url: break
print
print 'Found', len(package_urls), 'packages'
if os.path.exists('out'):
shutil.rmtree('out')
os.mkdir('out')
# Download the archive of the most recent version of each package.
for package_url in package_urls:
data = requests.get(package_url).json()
name = data['name']
version = data['versions'][-1]
print name, version
# Download the archive.
archive_url = ARCHIVE_URL.format(name, version)
tar_file = 'out/{}-{}.tar'.format(name, version)
with open(tar_file, 'wb') as file:
file.write(requests.get(archive_url).content)
# Extract it.
extract_dir = 'out/{}-{}'.format(name, version)
os.mkdir(extract_dir)
subprocess.call(['tar', '-xf', tar_file, '-C', extract_dir])
|
<commit_before><commit_msg>Add tool for downloading pub packages<commit_after>
|
# Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import re
import shutil
import subprocess
import requests
PACKAGE_URL = re.compile(r'http://pub.dartlang.org/packages/(\w+).json')
PUBSPEC = '''
name: %s
dependencies:
%s: %s
'''
ARCHIVE_URL = "https://commondatastorage.googleapis.com/pub.dartlang.org/packages/{}-{}.tar.gz"
package_urls = []
# Download the full list of package names.
print 'Downloading package lists:'
url = 'http://pub.dartlang.org/packages.json'
while True:
print '-', url
data = requests.get(url).json()
for package in data['packages']:
package_urls.append(package)
url = data['next']
if not url: break
print
print 'Found', len(package_urls), 'packages'
if os.path.exists('out'):
shutil.rmtree('out')
os.mkdir('out')
# Download the archive of the most recent version of each package.
for package_url in package_urls:
data = requests.get(package_url).json()
name = data['name']
version = data['versions'][-1]
print name, version
# Download the archive.
archive_url = ARCHIVE_URL.format(name, version)
tar_file = 'out/{}-{}.tar'.format(name, version)
with open(tar_file, 'wb') as file:
file.write(requests.get(archive_url).content)
# Extract it.
extract_dir = 'out/{}-{}'.format(name, version)
os.mkdir(extract_dir)
subprocess.call(['tar', '-xf', tar_file, '-C', extract_dir])
|
Add tool for downloading pub packages# Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import re
import shutil
import subprocess
import requests
PACKAGE_URL = re.compile(r'http://pub.dartlang.org/packages/(\w+).json')
PUBSPEC = '''
name: %s
dependencies:
%s: %s
'''
ARCHIVE_URL = "https://commondatastorage.googleapis.com/pub.dartlang.org/packages/{}-{}.tar.gz"
package_urls = []
# Download the full list of package names.
print 'Downloading package lists:'
url = 'http://pub.dartlang.org/packages.json'
while True:
print '-', url
data = requests.get(url).json()
for package in data['packages']:
package_urls.append(package)
url = data['next']
if not url: break
print
print 'Found', len(package_urls), 'packages'
if os.path.exists('out'):
shutil.rmtree('out')
os.mkdir('out')
# Download the archive of the most recent version of each package.
for package_url in package_urls:
data = requests.get(package_url).json()
name = data['name']
version = data['versions'][-1]
print name, version
# Download the archive.
archive_url = ARCHIVE_URL.format(name, version)
tar_file = 'out/{}-{}.tar'.format(name, version)
with open(tar_file, 'wb') as file:
file.write(requests.get(archive_url).content)
# Extract it.
extract_dir = 'out/{}-{}'.format(name, version)
os.mkdir(extract_dir)
subprocess.call(['tar', '-xf', tar_file, '-C', extract_dir])
|
<commit_before><commit_msg>Add tool for downloading pub packages<commit_after># Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import re
import shutil
import subprocess
import requests
PACKAGE_URL = re.compile(r'http://pub.dartlang.org/packages/(\w+).json')
PUBSPEC = '''
name: %s
dependencies:
%s: %s
'''
ARCHIVE_URL = "https://commondatastorage.googleapis.com/pub.dartlang.org/packages/{}-{}.tar.gz"
package_urls = []
# Download the full list of package names.
print 'Downloading package lists:'
url = 'http://pub.dartlang.org/packages.json'
while True:
print '-', url
data = requests.get(url).json()
for package in data['packages']:
package_urls.append(package)
url = data['next']
if not url: break
print
print 'Found', len(package_urls), 'packages'
if os.path.exists('out'):
shutil.rmtree('out')
os.mkdir('out')
# Download the archive of the most recent version of each package.
for package_url in package_urls:
data = requests.get(package_url).json()
name = data['name']
version = data['versions'][-1]
print name, version
# Download the archive.
archive_url = ARCHIVE_URL.format(name, version)
tar_file = 'out/{}-{}.tar'.format(name, version)
with open(tar_file, 'wb') as file:
file.write(requests.get(archive_url).content)
# Extract it.
extract_dir = 'out/{}-{}'.format(name, version)
os.mkdir(extract_dir)
subprocess.call(['tar', '-xf', tar_file, '-C', extract_dir])
|
|
bf957e66274911686631708a88b701735e036227
|
tests/pytests/integration/ssh/test_state.py
|
tests/pytests/integration/ssh/test_state.py
|
import pytest
pytestmark = [
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
]
@pytest.fixture(scope="module")
def state_tree(base_env_state_tree_root_dir):
top_file = """
base:
'localhost':
- basic
'127.0.0.1':
- basic
"""
map_file = """
{%- set abc = "def" %}
"""
state_file = """
{%- from "map.jinja" import abc with context %}
Ok with {{ abc }}:
test.succeed_without_changes
"""
top_tempfile = pytest.helpers.temp_file(
"top.sls", top_file, base_env_state_tree_root_dir
)
map_tempfile = pytest.helpers.temp_file(
"map.jinja", map_file, base_env_state_tree_root_dir
)
state_tempfile = pytest.helpers.temp_file(
"test.sls", state_file, base_env_state_tree_root_dir
)
with top_tempfile, map_tempfile, state_tempfile:
yield
@pytest.mark.slow_test
def test_state_with_import(salt_ssh_cli, state_tree):
"""
verify salt-ssh can use imported map files in states
"""
ret = salt_ssh_cli.run("state.sls", "test")
assert ret.exitcode == 0
assert ret.json
|
Verify salt-ssh can import from map files in states
|
Verify salt-ssh can import from map files in states
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Verify salt-ssh can import from map files in states
|
import pytest
pytestmark = [
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
]
@pytest.fixture(scope="module")
def state_tree(base_env_state_tree_root_dir):
top_file = """
base:
'localhost':
- basic
'127.0.0.1':
- basic
"""
map_file = """
{%- set abc = "def" %}
"""
state_file = """
{%- from "map.jinja" import abc with context %}
Ok with {{ abc }}:
test.succeed_without_changes
"""
top_tempfile = pytest.helpers.temp_file(
"top.sls", top_file, base_env_state_tree_root_dir
)
map_tempfile = pytest.helpers.temp_file(
"map.jinja", map_file, base_env_state_tree_root_dir
)
state_tempfile = pytest.helpers.temp_file(
"test.sls", state_file, base_env_state_tree_root_dir
)
with top_tempfile, map_tempfile, state_tempfile:
yield
@pytest.mark.slow_test
def test_state_with_import(salt_ssh_cli, state_tree):
"""
verify salt-ssh can use imported map files in states
"""
ret = salt_ssh_cli.run("state.sls", "test")
assert ret.exitcode == 0
assert ret.json
|
<commit_before><commit_msg>Verify salt-ssh can import from map files in states<commit_after>
|
import pytest
pytestmark = [
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
]
@pytest.fixture(scope="module")
def state_tree(base_env_state_tree_root_dir):
top_file = """
base:
'localhost':
- basic
'127.0.0.1':
- basic
"""
map_file = """
{%- set abc = "def" %}
"""
state_file = """
{%- from "map.jinja" import abc with context %}
Ok with {{ abc }}:
test.succeed_without_changes
"""
top_tempfile = pytest.helpers.temp_file(
"top.sls", top_file, base_env_state_tree_root_dir
)
map_tempfile = pytest.helpers.temp_file(
"map.jinja", map_file, base_env_state_tree_root_dir
)
state_tempfile = pytest.helpers.temp_file(
"test.sls", state_file, base_env_state_tree_root_dir
)
with top_tempfile, map_tempfile, state_tempfile:
yield
@pytest.mark.slow_test
def test_state_with_import(salt_ssh_cli, state_tree):
"""
verify salt-ssh can use imported map files in states
"""
ret = salt_ssh_cli.run("state.sls", "test")
assert ret.exitcode == 0
assert ret.json
|
Verify salt-ssh can import from map files in statesimport pytest
pytestmark = [
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
]
@pytest.fixture(scope="module")
def state_tree(base_env_state_tree_root_dir):
top_file = """
base:
'localhost':
- basic
'127.0.0.1':
- basic
"""
map_file = """
{%- set abc = "def" %}
"""
state_file = """
{%- from "map.jinja" import abc with context %}
Ok with {{ abc }}:
test.succeed_without_changes
"""
top_tempfile = pytest.helpers.temp_file(
"top.sls", top_file, base_env_state_tree_root_dir
)
map_tempfile = pytest.helpers.temp_file(
"map.jinja", map_file, base_env_state_tree_root_dir
)
state_tempfile = pytest.helpers.temp_file(
"test.sls", state_file, base_env_state_tree_root_dir
)
with top_tempfile, map_tempfile, state_tempfile:
yield
@pytest.mark.slow_test
def test_state_with_import(salt_ssh_cli, state_tree):
"""
verify salt-ssh can use imported map files in states
"""
ret = salt_ssh_cli.run("state.sls", "test")
assert ret.exitcode == 0
assert ret.json
|
<commit_before><commit_msg>Verify salt-ssh can import from map files in states<commit_after>import pytest
pytestmark = [
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
]
@pytest.fixture(scope="module")
def state_tree(base_env_state_tree_root_dir):
top_file = """
base:
'localhost':
- basic
'127.0.0.1':
- basic
"""
map_file = """
{%- set abc = "def" %}
"""
state_file = """
{%- from "map.jinja" import abc with context %}
Ok with {{ abc }}:
test.succeed_without_changes
"""
top_tempfile = pytest.helpers.temp_file(
"top.sls", top_file, base_env_state_tree_root_dir
)
map_tempfile = pytest.helpers.temp_file(
"map.jinja", map_file, base_env_state_tree_root_dir
)
state_tempfile = pytest.helpers.temp_file(
"test.sls", state_file, base_env_state_tree_root_dir
)
with top_tempfile, map_tempfile, state_tempfile:
yield
@pytest.mark.slow_test
def test_state_with_import(salt_ssh_cli, state_tree):
"""
verify salt-ssh can use imported map files in states
"""
ret = salt_ssh_cli.run("state.sls", "test")
assert ret.exitcode == 0
assert ret.json
|
|
559fc56211fb85badfe73090be2a8103fe46ba40
|
genhosts.py
|
genhosts.py
|
#!/usr/bin/env python
import socket
import sys
import yaml
from boto3.session import Session
def genhosts(elbip, sysdomain, outfile=sys.stdout):
SYS_PREFIXES = [
'console',
'uaa',
'apps',
'login']
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
print >>outfile, elbip, sysdomain
for prefix in SYS_PREFIXES:
print >>outfile, elbip, prefix+"."+sysdomain
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
def get_elbip(elb, stackname):
lbname = stackname + "-pcf-elb"
resp = elb.describe_load_balancers(
LoadBalancerNames=[lbname])
if len(resp.get('LoadBalancerDescriptions', [])) == 0:
raise Exception(lbname + " Loadbalacer could not be found")
dnsname = resp['LoadBalancerDescriptions'][0]['DNSName']
return socket.gethostbyname(dnsname)
def get_args():
import argparse
argp = argparse.ArgumentParser()
argp.add_argument('--profile')
argp.add_argument('--stack-name')
argp.add_argument('--outfile')
argp.add_argument('--prepared-cfg')
argp.add_argument('--system-domain')
argp.add_argument('--region', default='us-east-1')
return argp
def fix_args(args):
if args.prepared_cfg is not None:
opts = yaml.load(open(args.prepared_cfg, 'rt'))
args.system_domain = args.system_domain or opts["system_domain"]
args.stack_name = args.stack_name or opts["stack-name"]
args.region = opts["region"]
if args.outfile is not None:
args.outfile = open(args.outfile, "wt")
def main(argv):
args = get_args().parse_args(argv)
if args.prepared_cfg is None and\
args.system_domain is None:
print ("Either --prepared-cfg or "
"(--system-domain and --stack-name) are required")
return -1
fix_args(args)
session = Session(profile_name=args.profile, region_name=args.region)
elb = session.client("elb")
genhosts(
get_elbip(elb, args.stack_name),
args.system_domain,
args.outfile)
if __name__ == "__main__":
import sys
sys.exit(main(sys.argv[1:]))
|
Add ability to generate correct hosts file
|
Add ability to generate correct hosts file
|
Python
|
apache-2.0
|
mandarjog/cfawsinit
|
Add ability to generate correct hosts file
|
#!/usr/bin/env python
import socket
import sys
import yaml
from boto3.session import Session
def genhosts(elbip, sysdomain, outfile=sys.stdout):
SYS_PREFIXES = [
'console',
'uaa',
'apps',
'login']
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
print >>outfile, elbip, sysdomain
for prefix in SYS_PREFIXES:
print >>outfile, elbip, prefix+"."+sysdomain
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
def get_elbip(elb, stackname):
lbname = stackname + "-pcf-elb"
resp = elb.describe_load_balancers(
LoadBalancerNames=[lbname])
if len(resp.get('LoadBalancerDescriptions', [])) == 0:
raise Exception(lbname + " Loadbalacer could not be found")
dnsname = resp['LoadBalancerDescriptions'][0]['DNSName']
return socket.gethostbyname(dnsname)
def get_args():
import argparse
argp = argparse.ArgumentParser()
argp.add_argument('--profile')
argp.add_argument('--stack-name')
argp.add_argument('--outfile')
argp.add_argument('--prepared-cfg')
argp.add_argument('--system-domain')
argp.add_argument('--region', default='us-east-1')
return argp
def fix_args(args):
if args.prepared_cfg is not None:
opts = yaml.load(open(args.prepared_cfg, 'rt'))
args.system_domain = args.system_domain or opts["system_domain"]
args.stack_name = args.stack_name or opts["stack-name"]
args.region = opts["region"]
if args.outfile is not None:
args.outfile = open(args.outfile, "wt")
def main(argv):
args = get_args().parse_args(argv)
if args.prepared_cfg is None and\
args.system_domain is None:
print ("Either --prepared-cfg or "
"(--system-domain and --stack-name) are required")
return -1
fix_args(args)
session = Session(profile_name=args.profile, region_name=args.region)
elb = session.client("elb")
genhosts(
get_elbip(elb, args.stack_name),
args.system_domain,
args.outfile)
if __name__ == "__main__":
import sys
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>Add ability to generate correct hosts file<commit_after>
|
#!/usr/bin/env python
import socket
import sys
import yaml
from boto3.session import Session
def genhosts(elbip, sysdomain, outfile=sys.stdout):
SYS_PREFIXES = [
'console',
'uaa',
'apps',
'login']
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
print >>outfile, elbip, sysdomain
for prefix in SYS_PREFIXES:
print >>outfile, elbip, prefix+"."+sysdomain
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
def get_elbip(elb, stackname):
lbname = stackname + "-pcf-elb"
resp = elb.describe_load_balancers(
LoadBalancerNames=[lbname])
if len(resp.get('LoadBalancerDescriptions', [])) == 0:
raise Exception(lbname + " Loadbalacer could not be found")
dnsname = resp['LoadBalancerDescriptions'][0]['DNSName']
return socket.gethostbyname(dnsname)
def get_args():
import argparse
argp = argparse.ArgumentParser()
argp.add_argument('--profile')
argp.add_argument('--stack-name')
argp.add_argument('--outfile')
argp.add_argument('--prepared-cfg')
argp.add_argument('--system-domain')
argp.add_argument('--region', default='us-east-1')
return argp
def fix_args(args):
if args.prepared_cfg is not None:
opts = yaml.load(open(args.prepared_cfg, 'rt'))
args.system_domain = args.system_domain or opts["system_domain"]
args.stack_name = args.stack_name or opts["stack-name"]
args.region = opts["region"]
if args.outfile is not None:
args.outfile = open(args.outfile, "wt")
def main(argv):
args = get_args().parse_args(argv)
if args.prepared_cfg is None and\
args.system_domain is None:
print ("Either --prepared-cfg or "
"(--system-domain and --stack-name) are required")
return -1
fix_args(args)
session = Session(profile_name=args.profile, region_name=args.region)
elb = session.client("elb")
genhosts(
get_elbip(elb, args.stack_name),
args.system_domain,
args.outfile)
if __name__ == "__main__":
import sys
sys.exit(main(sys.argv[1:]))
|
Add ability to generate correct hosts file#!/usr/bin/env python
import socket
import sys
import yaml
from boto3.session import Session
def genhosts(elbip, sysdomain, outfile=sys.stdout):
SYS_PREFIXES = [
'console',
'uaa',
'apps',
'login']
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
print >>outfile, elbip, sysdomain
for prefix in SYS_PREFIXES:
print >>outfile, elbip, prefix+"."+sysdomain
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
def get_elbip(elb, stackname):
lbname = stackname + "-pcf-elb"
resp = elb.describe_load_balancers(
LoadBalancerNames=[lbname])
if len(resp.get('LoadBalancerDescriptions', [])) == 0:
raise Exception(lbname + " Loadbalacer could not be found")
dnsname = resp['LoadBalancerDescriptions'][0]['DNSName']
return socket.gethostbyname(dnsname)
def get_args():
import argparse
argp = argparse.ArgumentParser()
argp.add_argument('--profile')
argp.add_argument('--stack-name')
argp.add_argument('--outfile')
argp.add_argument('--prepared-cfg')
argp.add_argument('--system-domain')
argp.add_argument('--region', default='us-east-1')
return argp
def fix_args(args):
if args.prepared_cfg is not None:
opts = yaml.load(open(args.prepared_cfg, 'rt'))
args.system_domain = args.system_domain or opts["system_domain"]
args.stack_name = args.stack_name or opts["stack-name"]
args.region = opts["region"]
if args.outfile is not None:
args.outfile = open(args.outfile, "wt")
def main(argv):
args = get_args().parse_args(argv)
if args.prepared_cfg is None and\
args.system_domain is None:
print ("Either --prepared-cfg or "
"(--system-domain and --stack-name) are required")
return -1
fix_args(args)
session = Session(profile_name=args.profile, region_name=args.region)
elb = session.client("elb")
genhosts(
get_elbip(elb, args.stack_name),
args.system_domain,
args.outfile)
if __name__ == "__main__":
import sys
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>Add ability to generate correct hosts file<commit_after>#!/usr/bin/env python
import socket
import sys
import yaml
from boto3.session import Session
def genhosts(elbip, sysdomain, outfile=sys.stdout):
SYS_PREFIXES = [
'console',
'uaa',
'apps',
'login']
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
print >>outfile, elbip, sysdomain
for prefix in SYS_PREFIXES:
print >>outfile, elbip, prefix+"."+sysdomain
print >>outfile, "#"*16, "Generated for /etc/hosts by cfawsinit", "#"*16
def get_elbip(elb, stackname):
lbname = stackname + "-pcf-elb"
resp = elb.describe_load_balancers(
LoadBalancerNames=[lbname])
if len(resp.get('LoadBalancerDescriptions', [])) == 0:
raise Exception(lbname + " Loadbalacer could not be found")
dnsname = resp['LoadBalancerDescriptions'][0]['DNSName']
return socket.gethostbyname(dnsname)
def get_args():
import argparse
argp = argparse.ArgumentParser()
argp.add_argument('--profile')
argp.add_argument('--stack-name')
argp.add_argument('--outfile')
argp.add_argument('--prepared-cfg')
argp.add_argument('--system-domain')
argp.add_argument('--region', default='us-east-1')
return argp
def fix_args(args):
if args.prepared_cfg is not None:
opts = yaml.load(open(args.prepared_cfg, 'rt'))
args.system_domain = args.system_domain or opts["system_domain"]
args.stack_name = args.stack_name or opts["stack-name"]
args.region = opts["region"]
if args.outfile is not None:
args.outfile = open(args.outfile, "wt")
def main(argv):
args = get_args().parse_args(argv)
if args.prepared_cfg is None and\
args.system_domain is None:
print ("Either --prepared-cfg or "
"(--system-domain and --stack-name) are required")
return -1
fix_args(args)
session = Session(profile_name=args.profile, region_name=args.region)
elb = session.client("elb")
genhosts(
get_elbip(elb, args.stack_name),
args.system_domain,
args.outfile)
if __name__ == "__main__":
import sys
sys.exit(main(sys.argv[1:]))
|
|
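A hedged usage sketch for the script above, driving main() directly rather than through the shell; the profile, stack name, and domain are placeholders, and an AWS credential set with ELB describe permissions is assumed:
import genhosts
genhosts.main([
    "--profile", "default",
    "--stack-name", "mystack",
    "--system-domain", "example.com",
    "--region", "us-east-1",
])
Passing --outfile instead writes the generated /etc/hosts block to a file rather than stdout.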
838f85c604c2c320051aa69838b01ca8b07c7f8b
|
integration_tests/directory/test_data_source.py
|
integration_tests/directory/test_data_source.py
|
from contextlib import closing
from minerva.directory import DataSource
from minerva.test import with_conn
@with_conn()
def test_create_data_source(conn):
with closing(conn.cursor()) as cursor:
data_source = DataSource.create(
"TestSource", "short description of data source"
)(cursor)
assert data_source.id is not None
assert data_source.name == "TestSource"
|
Add basic DataSource integration test
|
Add basic DataSource integration test
|
Python
|
agpl-3.0
|
hendrikx-itc/minerva,hendrikx-itc/minerva
|
Add basic DataSource integration test
|
from contextlib import closing
from minerva.directory import DataSource
from minerva.test import with_conn
@with_conn()
def test_create_data_source(conn):
with closing(conn.cursor()) as cursor:
data_source = DataSource.create(
"TestSource", "short description of data source"
)(cursor)
assert data_source.id is not None
assert data_source.name == "TestSource"
|
<commit_before><commit_msg>Add basic DataSource integration test<commit_after>
|
from contextlib import closing
from minerva.directory import DataSource
from minerva.test import with_conn
@with_conn()
def test_create_data_source(conn):
with closing(conn.cursor()) as cursor:
data_source = DataSource.create(
"TestSource", "short description of data source"
)(cursor)
assert data_source.id is not None
assert data_source.name == "TestSource"
|
Add basic DataSource integration testfrom contextlib import closing
from minerva.directory import DataSource
from minerva.test import with_conn
@with_conn()
def test_create_data_source(conn):
with closing(conn.cursor()) as cursor:
data_source = DataSource.create(
"TestSource", "short description of data source"
)(cursor)
assert data_source.id is not None
assert data_source.name == "TestSource"
|
<commit_before><commit_msg>Add basic DataSource integration test<commit_after>from contextlib import closing
from minerva.directory import DataSource
from minerva.test import with_conn
@with_conn()
def test_create_data_source(conn):
with closing(conn.cursor()) as cursor:
data_source = DataSource.create(
"TestSource", "short description of data source"
)(cursor)
assert data_source.id is not None
assert data_source.name == "TestSource"
|
|
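The double call DataSource.create(...)(cursor) in the test implies a curried shape: create returns a callable that executes against a cursor. A sketch of that shape, not minerva's actual implementation (the table name is an assumption):
def create(name, description):
    def execute(cursor):
        cursor.execute(
            "INSERT INTO directory.data_source (name, description) "
            "VALUES (%s, %s) RETURNING id",
            (name, description),
        )
        return cursor.fetchone()[0]
    return execute
Separating construction from execution lets the caller decide which connection, cursor, and transaction the statement runs in.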
3d62a8e177cbeaa9acc1ceea57a6781a8180166e
|
reminder/access_list.py
|
reminder/access_list.py
|
from pyramid.security import (
Allow,
Everyone
)
class RootFactory(object):
"""
Set up what permissions groups have.
"""
__acl__ = [
(Allow, 'users', 'view'),
(Allow, 'users', 'edit'),
(Allow, 'admins', 'admin')
]
def __init__(self, request):
pass
|
Access list in separate module
|
Access list in separate module
|
Python
|
mit
|
haydenashton/reminders,haydenashton/reminders
|
Access list in separate module
|
from pyramid.security import (
Allow,
Everyone
)
class RootFactory(object):
"""
Set up what permissions groups have.
"""
__acl__ = [
(Allow, 'users', 'view'),
(Allow, 'users', 'edit'),
(Allow, 'admins', 'admin')
]
def __init__(self, request):
pass
|
<commit_before><commit_msg>Access list in separate module<commit_after>
|
from pyramid.security import (
Allow,
Everyone
)
class RootFactory(object):
"""
Set up what permissions groups have.
"""
__acl__ = [
(Allow, 'users', 'view'),
(Allow, 'users', 'edit'),
(Allow, 'admins', 'admin')
]
def __init__(self, request):
pass
|
Access list in separate modulefrom pyramid.security import (
Allow,
Everyone
)
class RootFactory(object):
"""
Set up what permissions groups have.
"""
__acl__ = [
(Allow, 'users', 'view'),
(Allow, 'users', 'edit'),
(Allow, 'admins', 'admin')
]
def __init__(self, request):
pass
|
<commit_before><commit_msg>Access list in separate module<commit_after>from pyramid.security import (
Allow,
Everyone
)
class RootFactory(object):
"""
Set up what permissions groups have.
"""
__acl__ = [
(Allow, 'users', 'view'),
(Allow, 'users', 'edit'),
(Allow, 'admins', 'admin')
]
def __init__(self, request):
pass
|
|
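A minimal sketch of wiring the RootFactory above into a Pyramid application so its __acl__ is consulted; the auth-tkt secret and the dotted path are placeholder assumptions:
from pyramid.config import Configurator
from pyramid.authentication import AuthTktAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
def main(global_config, **settings):
    config = Configurator(
        settings=settings,
        root_factory='reminder.access_list.RootFactory',  # assumed dotted path
    )
    config.set_authentication_policy(AuthTktAuthenticationPolicy('s3cret'))
    config.set_authorization_policy(ACLAuthorizationPolicy())
    config.scan()
    return config.make_wsgi_app()
Views then opt in with view_config(..., permission='view'), and the ACL entries decide access.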
d6a4f3149739ef35bed57c61ebaacfabeb51e46a
|
etrago/cluster/analyses/objective_function.py
|
etrago/cluster/analyses/objective_function.py
|
# -*- coding: utf-8 -*-
"""
"""
from os import path, listdir
import matplotlib
import pandas as pd
results_dir = 'snapshot-clustering-results-k10-noDailyBounds'
clustered_path = path.join('/home/simnh/pf_results', results_dir, 'daily')
original_path = path.join('/home/simnh/pf_results', results_dir, 'original')
network = pd.read_csv(path.join(original_path, 'network.csv'))
abs_err = {}
rel_err = {}
for c in listdir(clustered_path):
if c != 'Z.csv':
network_c = pd.read_csv(path.join(clustered_path, c, 'network.csv'))
abs_err[str(c)] = (abs(network_c['objective'].values[0] -
network['objective'].values[0]))
rel_err[str(c)] = abs_err[str(c)] / network['objective'].values[0]
errors = pd.DataFrame({'abs_err': abs_err, 'rel_err': rel_err})
errors.index = [int(i) for i in errors.index]
errors.sort_index(inplace=True)
errors['rel_err'].plot(style='--*')
|
Add code for objective function error
|
Add code for objective function error
|
Python
|
agpl-3.0
|
openego/eTraGo
|
Add code for objective function error
|
# -*- coding: utf-8 -*-
"""
"""
from os import path, listdir
import matplotlib
import pandas as pd
results_dir = 'snapshot-clustering-results-k10-noDailyBounds'
clustered_path = path.join('/home/simnh/pf_results', results_dir, 'daily')
original_path = path.join('/home/simnh/pf_results', results_dir, 'original')
network = pd.read_csv(path.join(original_path, 'network.csv'))
abs_err = {}
rel_err = {}
for c in listdir(clustered_path):
if c != 'Z.csv':
network_c = pd.read_csv(path.join(clustered_path, c, 'network.csv'))
abs_err[str(c)] = (abs(network_c['objective'].values[0] -
network['objective'].values[0]))
rel_err[str(c)] = abs_err[str(c)] / network['objective'].values[0]
errors = pd.DataFrame({'abs_err': abs_err, 'rel_err': rel_err})
errors.index = [int(i) for i in errors.index]
errors.sort_index(inplace=True)
errors['rel_err'].plot(style='--*')
|
<commit_before><commit_msg>Add code for objective function error<commit_after>
|
# -*- coding: utf-8 -*-
"""
"""
from os import path, listdir
import matplotlib
import pandas as pd
results_dir = 'snapshot-clustering-results-k10-noDailyBounds'
clustered_path = path.join('/home/simnh/pf_results', results_dir, 'daily')
original_path = path.join('/home/simnh/pf_results', results_dir, 'original')
network = pd.read_csv(path.join(original_path, 'network.csv'))
abs_err = {}
rel_err = {}
for c in listdir(clustered_path):
if c != 'Z.csv':
network_c = pd.read_csv(path.join(clustered_path, c, 'network.csv'))
abs_err[str(c)] = (abs(network_c['objective'].values[0] -
network['objective'].values[0]))
rel_err[str(c)] = abs_err[str(c)] / network['objective'].values[0]
errors = pd.DataFrame({'abs_err': abs_err, 'rel_err': rel_err})
errors.index = [int(i) for i in errors.index]
errors.sort_index(inplace=True)
errors['rel_err'].plot(style='--*')
|
Add code for objective function error# -*- coding: utf-8 -*-
"""
"""
from os import path, listdir
import matplotlib
import pandas as pd
results_dir = 'snapshot-clustering-results-k10-noDailyBounds'
clustered_path = path.join('/home/simnh/pf_results', results_dir, 'daily')
original_path = path.join('/home/simnh/pf_results', results_dir, 'original')
network = pd.read_csv(path.join(original_path, 'network.csv'))
abs_err = {}
rel_err = {}
for c in listdir(clustered_path):
if c != 'Z.csv':
network_c = pd.read_csv(path.join(clustered_path, c, 'network.csv'))
abs_err[str(c)] = (abs(network_c['objective'].values[0] -
network['objective'].values[0]))
rel_err[str(c)] = abs_err[str(c)] / network['objective'].values[0]
errors = pd.DataFrame({'abs_err': abs_err, 'rel_err': rel_err})
errors.index = [int(i) for i in errors.index]
errors.sort_index(inplace=True)
errors['rel_err'].plot(style='--*')
|
<commit_before><commit_msg>Add code for objective function error<commit_after># -*- coding: utf-8 -*-
"""
"""
from os import path, listdir
import matplotlib
import pandas as pd
results_dir = 'snapshot-clustering-results-k10-noDailyBounds'
clustered_path = path.join('/home/simnh/pf_results', results_dir, 'daily')
original_path = path.join('/home/simnh/pf_results', results_dir, 'original')
network = pd.read_csv(path.join(original_path, 'network.csv'))
abs_err = {}
rel_err = {}
for c in listdir(clustered_path):
if c != 'Z.csv':
network_c = pd.read_csv(path.join(clustered_path, c, 'network.csv'))
abs_err[str(c)] = (abs(network_c['objective'].values[0] -
network['objective'].values[0]))
rel_err[str(c)] = abs_err[str(c)] / network['objective'].values[0]
errors = pd.DataFrame({'abs_err': abs_err, 'rel_err': rel_err})
errors.index = [int(i) for i in errors.index]
errors.sort_index(inplace=True)
errors['rel_err'].plot(style='--*')
|
|
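Restated as a standalone helper, the two measures the loop computes are the absolute error |z_c - z| and the relative error |z_c - z| / z between the clustered objective z_c and the original objective z; a sketch:
def objective_errors(z_original, z_clustered):
    abs_err = abs(z_clustered - z_original)
    return abs_err, abs_err / z_original
# e.g. objective_errors(100.0, 103.0) -> (3.0, 0.03)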
6cd6a5dbc81548aa823fb5a8e69041c05c71914e
|
cabot/cabotapp/tests/test_plugin_settings.py
|
cabot/cabotapp/tests/test_plugin_settings.py
|
# -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.shortcuts import resolve_url
from django.test import TestCase
from mock import patch
from cabot.cabotapp.alert import AlertPlugin
from cabot.cabotapp.models import Service
class PluginSettingsTest(TestCase):
def setUp(self):
self.username = 'testuser'
self.password = 'testuserpassword'
self.user = User.objects.create(username=self.username)
self.user.set_password(self.password)
self.user.save()
self.client.login(username=self.username, password=self.password)
def test_global_settings(self):
resp = self.client.get(resolve_url('plugin-settings-global'), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_settings(self):
plugin = AlertPlugin.objects.first()
resp = self.client.get(resolve_url('plugin-settings', plugin_name=plugin.title), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_disable(self):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('plugin-settings', plugin_name=plugin.title), {'enabled': False}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertIn('Updated Successfully', resp.content)
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_plugin_alert_test(self, fake_send_alert):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('alert-test-plugin'), {'alert_plugin': plugin.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_global_alert_test(self, fake_send_alert):
service = Service.objects.create(
name='Service',
)
plugin = AlertPlugin.objects.first()
service.alerts.add(
plugin
)
resp = self.client.post(resolve_url('alert-test'), {'service': service.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
|
Add tests for plugin settings and alert tests
|
Add tests for plugin settings and alert tests
|
Python
|
mit
|
maks-us/cabot,arachnys/cabot,arachnys/cabot,maks-us/cabot,maks-us/cabot,maks-us/cabot,arachnys/cabot,arachnys/cabot
|
Add tests for plugin settings and alert tests
|
# -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.shortcuts import resolve_url
from django.test import TestCase
from mock import patch
from cabot.cabotapp.alert import AlertPlugin
from cabot.cabotapp.models import Service
class PluginSettingsTest(TestCase):
def setUp(self):
self.username = 'testuser'
self.password = 'testuserpassword'
self.user = User.objects.create(username=self.username)
self.user.set_password(self.password)
self.user.save()
self.client.login(username=self.username, password=self.password)
def test_global_settings(self):
resp = self.client.get(resolve_url('plugin-settings-global'), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_settings(self):
plugin = AlertPlugin.objects.first()
resp = self.client.get(resolve_url('plugin-settings', plugin_name=plugin.title), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_disable(self):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('plugin-settings', plugin_name=plugin.title), {'enabled': False}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertIn('Updated Successfully', resp.content)
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_plugin_alert_test(self, fake_send_alert):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('alert-test-plugin'), {'alert_plugin': plugin.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_global_alert_test(self, fake_send_alert):
service = Service.objects.create(
name='Service',
)
plugin = AlertPlugin.objects.first()
service.alerts.add(
plugin
)
resp = self.client.post(resolve_url('alert-test'), {'service': service.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
|
<commit_before><commit_msg>Add tests for plugin settings and alert tests<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.shortcuts import resolve_url
from django.test import TestCase
from mock import patch
from cabot.cabotapp.alert import AlertPlugin
from cabot.cabotapp.models import Service
class PluginSettingsTest(TestCase):
def setUp(self):
self.username = 'testuser'
self.password = 'testuserpassword'
self.user = User.objects.create(username=self.username)
self.user.set_password(self.password)
self.user.save()
self.client.login(username=self.username, password=self.password)
def test_global_settings(self):
resp = self.client.get(resolve_url('plugin-settings-global'), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_settings(self):
plugin = AlertPlugin.objects.first()
resp = self.client.get(resolve_url('plugin-settings', plugin_name=plugin.title), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_disable(self):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('plugin-settings', plugin_name=plugin.title), {'enabled': False}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertIn('Updated Successfully', resp.content)
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_plugin_alert_test(self, fake_send_alert):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('alert-test-plugin'), {'alert_plugin': plugin.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_global_alert_test(self, fake_send_alert):
service = Service.objects.create(
name='Service',
)
plugin = AlertPlugin.objects.first()
service.alerts.add(
plugin
)
resp = self.client.post(resolve_url('alert-test'), {'service': service.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
|
Add tests for plugin settings and alert tests# -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.shortcuts import resolve_url
from django.test import TestCase
from mock import patch
from cabot.cabotapp.alert import AlertPlugin
from cabot.cabotapp.models import Service
class PluginSettingsTest(TestCase):
def setUp(self):
self.username = 'testuser'
self.password = 'testuserpassword'
self.user = User.objects.create(username=self.username)
self.user.set_password(self.password)
self.user.save()
self.client.login(username=self.username, password=self.password)
def test_global_settings(self):
resp = self.client.get(resolve_url('plugin-settings-global'), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_settings(self):
plugin = AlertPlugin.objects.first()
resp = self.client.get(resolve_url('plugin-settings', plugin_name=plugin.title), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_disable(self):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('plugin-settings', plugin_name=plugin.title), {'enabled': False}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertIn('Updated Successfully', resp.content)
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_plugin_alert_test(self, fake_send_alert):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('alert-test-plugin'), {'alert_plugin': plugin.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_global_alert_test(self, fake_send_alert):
service = Service.objects.create(
name='Service',
)
plugin = AlertPlugin.objects.first()
service.alerts.add(
plugin
)
resp = self.client.post(resolve_url('alert-test'), {'service': service.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
|
<commit_before><commit_msg>Add tests for plugin settings and alert tests<commit_after># -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.shortcuts import resolve_url
from django.test import TestCase
from mock import patch
from cabot.cabotapp.alert import AlertPlugin
from cabot.cabotapp.models import Service
class PluginSettingsTest(TestCase):
def setUp(self):
self.username = 'testuser'
self.password = 'testuserpassword'
self.user = User.objects.create(username=self.username)
self.user.set_password(self.password)
self.user.save()
self.client.login(username=self.username, password=self.password)
def test_global_settings(self):
resp = self.client.get(resolve_url('plugin-settings-global'), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_settings(self):
plugin = AlertPlugin.objects.first()
resp = self.client.get(resolve_url('plugin-settings', plugin_name=plugin.title), follow=True)
self.assertEqual(resp.status_code, 200)
def test_plugin_disable(self):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('plugin-settings', plugin_name=plugin.title), {'enabled': False}, follow=True)
self.assertEqual(resp.status_code, 200)
self.assertIn('Updated Successfully', resp.content)
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_plugin_alert_test(self, fake_send_alert):
plugin = AlertPlugin.objects.first()
resp = self.client.post(resolve_url('alert-test-plugin'), {'alert_plugin': plugin.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
@patch('cabot.cabotapp.alert.AlertPlugin._send_alert')
def test_global_alert_test(self, fake_send_alert):
service = Service.objects.create(
name='Service',
)
plugin = AlertPlugin.objects.first()
service.alerts.add(
plugin
)
resp = self.client.post(resolve_url('alert-test'), {'service': service.id, 'old_status': 'PASSING', 'new_status': 'ERROR'})
self.assertEqual(resp.status_code, 200)
self.assertIn('ok', resp.content)
fake_send_alert.assert_called()
|
|
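A self-contained sketch of the patch-and-assert pattern those tests lean on (Plugin here is a stand-in class, not cabot's):
from mock import patch
class Plugin(object):
    def _send_alert(self):
        raise RuntimeError("would hit the network")
    def alert(self):
        self._send_alert()
with patch.object(Plugin, '_send_alert') as fake_send_alert:
    Plugin().alert()                 # exercises the code path
    fake_send_alert.assert_called()  # passes; the real _send_alert never ran
Patching at the class level means every instance created inside the block receives the mock.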
235e3933d5a3c0cf4c171af8fb3950d50598c48e
|
pconsc/scripts/trim2jones.py
|
pconsc/scripts/trim2jones.py
|
#!/usr/bin/env python
import sys, os
infilef = sys.argv[1]
infile = open(infilef)
for l in infile:
if '>' in l:
continue
upperseq = ''.join([c for c in l if not c.islower()])
upperseq = upperseq.replace('X', '-')
sys.stdout.write(upperseq)
|
Add a3m to jones parser.
|
Add a3m to jones parser.
|
Python
|
mit
|
ElofssonLab/pcons-fold,MMichel/pcons-fold,ElofssonLab/pcons-fold,MMichel/pcons-fold
|
Add a3m to jones parser.
|
#!/usr/bin/env python
import sys, os
infilef = sys.argv[1]
infile = open(infilef)
for l in infile:
if '>' in l:
continue
upperseq = ''.join([c for c in l if not c.islower()])
upperseq = upperseq.replace('X', '-')
sys.stdout.write(upperseq)
|
<commit_before><commit_msg>Add a3m to jones parser.<commit_after>
|
#!/usr/bin/env python
import sys, os
infilef = sys.argv[1]
infile = open(infilef)
for l in infile:
if '>' in l:
continue
upperseq = ''.join([c for c in l if not c.islower()])
upperseq = upperseq.replace('X', '-')
sys.stdout.write(upperseq)
|
Add a3m to jones parser.#!/usr/bin/env python
import sys, os
infilef = sys.argv[1]
infile = open(infilef)
for l in infile:
if '>' in l:
continue
upperseq = ''.join([c for c in l if not c.islower()])
upperseq = upperseq.replace('X', '-')
sys.stdout.write(upperseq)
|
<commit_before><commit_msg>Add a3m to jones parser.<commit_after>#!/usr/bin/env python
import sys, os
infilef = sys.argv[1]
infile = open(infilef)
for l in infile:
if '>' in l:
continue
upperseq = ''.join([c for c in l if not c.islower()])
upperseq = upperseq.replace('X', '-')
sys.stdout.write(upperseq)
|
|
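A worked example of the transformation the loop applies to one a3m alignment row (lowercase insert-state columns dropped, X treated as a gap):
line = "MKVlaXGD-e\n"
upperseq = ''.join(c for c in line if not c.islower()).replace('X', '-')
# upperseq == "MKV-GD-\n" -- lowercase columns removed, X -> '-'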
e7a88fd1f896f2bac5e6c6bf1ced57cb4ed689dc
|
ffdc/plugins/shell_execution.py
|
ffdc/plugins/shell_execution.py
|
import subprocess
def execute_cmd(parms_string, quiet=False):
r"""
Run CLI standard tool or scripts.
Description of variables:
parms_string Command to execute from the current SHELL.
quiet do not print tool error message if True
"""
result = subprocess.run([parms_string],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
universal_newlines=True)
if result.stderr and not quiet:
print('\n\t\tERROR with %s ' % parms_string)
print('\t\t' + result.stderr)
return result.stdout
|
Add plugin shell execution script
|
Add plugin shell execution script
Change-Id: Ie2444c332e54956c547e74fa71da5a9465c0f8c8
Signed-off-by: George Keishing <bef0a9ecac45fb57611777c8270153994e13fd2e@in.ibm.com>
|
Python
|
apache-2.0
|
openbmc/openbmc-test-automation,openbmc/openbmc-test-automation
|
Add plugin shell execution script
Change-Id: Ie2444c332e54956c547e74fa71da5a9465c0f8c8
Signed-off-by: George Keishing <bef0a9ecac45fb57611777c8270153994e13fd2e@in.ibm.com>
|
import subprocess
def execute_cmd(parms_string, quiet=False):
r"""
Run CLI standard tool or scripts.
Description of variables:
parms_string Command to execute from the current SHELL.
quiet do not print tool error message if True
"""
result = subprocess.run([parms_string],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
universal_newlines=True)
if result.stderr and not quiet:
print('\n\t\tERROR with %s ' % parms_string)
print('\t\t' + result.stderr)
return result.stdout
|
<commit_before><commit_msg>Add plugin shell execution script
Change-Id: Ie2444c332e54956c547e74fa71da5a9465c0f8c8
Signed-off-by: George Keishing <bef0a9ecac45fb57611777c8270153994e13fd2e@in.ibm.com><commit_after>
|
import subprocess
def execute_cmd(parms_string, quiet=False):
r"""
Run CLI standard tool or scripts.
Description of variables:
parms_string Command to execute from the current SHELL.
quiet do not print tool error message if True
"""
result = subprocess.run([parms_string],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
universal_newlines=True)
if result.stderr and not quiet:
print('\n\t\tERROR with %s ' % parms_string)
print('\t\t' + result.stderr)
return result.stdout
|
Add plugin shell execution script
Change-Id: Ie2444c332e54956c547e74fa71da5a9465c0f8c8
Signed-off-by: George Keishing <bef0a9ecac45fb57611777c8270153994e13fd2e@in.ibm.com>import subprocess
def execute_cmd(parms_string, quiet=False):
r"""
Run CLI standard tool or scripts.
Description of variables:
parms_string Command to execute from the current SHELL.
quiet do not print tool error message if True
"""
result = subprocess.run([parms_string],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
universal_newlines=True)
if result.stderr and not quiet:
print('\n\t\tERROR with %s ' % parms_string)
print('\t\t' + result.stderr)
return result.stdout
|
<commit_before><commit_msg>Add plugin shell execution script
Change-Id: Ie2444c332e54956c547e74fa71da5a9465c0f8c8
Signed-off-by: George Keishing <bef0a9ecac45fb57611777c8270153994e13fd2e@in.ibm.com><commit_after>import subprocess
def execute_cmd(parms_string, quiet=False):
r"""
Run CLI standard tool or scripts.
Description of variables:
parms_string Command to execute from the current SHELL.
quiet do not print tool error message if True
"""
result = subprocess.run([parms_string],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
universal_newlines=True)
if result.stderr and not quiet:
print('\n\t\tERROR with %s ' % parms_string)
print('\t\t' + result.stderr)
return result.stdout
|
|
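A hedged usage sketch; because shell=True, any shell command string works, and quiet suppresses the stderr echo:
from ffdc.plugins.shell_execution import execute_cmd
out = execute_cmd("echo hello")              # -> "hello\n"
execute_cmd("ls /nonexistent", quiet=True)   # error text not printed; stdout returned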
408bf25432fd7c9b355bc549f18f552d89a02a43
|
pymc/examples/hierarchical_sqlite.py
|
pymc/examples/hierarchical_sqlite.py
|
if __name__ == '__main__': # Avoid loading during tests.
import pymc as pm
import pymc.examples.hierarchical as hier
with hier.model:
trace = pm.sample(3000, hier.step, hier.start, trace='sqlite')
|
Add SQLite backend example for hierarchical.py
|
Add SQLite backend example for hierarchical.py
|
Python
|
apache-2.0
|
kyleam/pymc3,tyarkoni/pymc3,evidation-health/pymc3,jameshensman/pymc3,superbobry/pymc3,LoLab-VU/pymc,CVML/pymc3,hothHowler/pymc3,wanderer2/pymc3,clk8908/pymc3,LoLab-VU/pymc,MCGallaspy/pymc3,hothHowler/pymc3,MichielCottaar/pymc3,CVML/pymc3,Anjum48/pymc3,superbobry/pymc3,Anjum48/pymc3,arunlodhi/pymc3,evidation-health/pymc3,jameshensman/pymc3,clk8908/pymc3,MCGallaspy/pymc3,dhiapet/PyMC3,kyleam/pymc3,dhiapet/PyMC3,kmather73/pymc3,JesseLivezey/pymc3,JesseLivezey/pymc3,tyarkoni/pymc3,kmather73/pymc3,MichielCottaar/pymc3,wanderer2/pymc3,arunlodhi/pymc3
|
Add SQLite backend example for hierarchical.py
|
if __name__ == '__main__': # Avoid loading during tests.
import pymc as pm
import pymc.examples.hierarchical as hier
with hier.model:
trace = pm.sample(3000, hier.step, hier.start, trace='sqlite')
|
<commit_before><commit_msg>Add SQLite backend example for hierarchical.py<commit_after>
|
if __name__ == '__main__': # Avoid loading during tests.
import pymc as pm
import pymc.examples.hierarchical as hier
with hier.model:
trace = pm.sample(3000, hier.step, hier.start, trace='sqlite')
|
Add SQLite backend example for hierarchical.pyif __name__ == '__main__': # Avoid loading during tests.
import pymc as pm
import pymc.examples.hierarchical as hier
with hier.model:
trace = pm.sample(3000, hier.step, hier.start, trace='sqlite')
|
<commit_before><commit_msg>Add SQLite backend example for hierarchical.py<commit_after>if __name__ == '__main__': # Avoid loading during tests.
import pymc as pm
import pymc.examples.hierarchical as hier
with hier.model:
trace = pm.sample(3000, hier.step, hier.start, trace='sqlite')
|
|
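A sketch of reading that trace back, assuming the pymc3-era backends API; both the module path pymc.backends.sqlite and the default file name 'mcmc.sqlite' are assumptions not confirmed by the example:
import pymc as pm
import pymc.examples.hierarchical as hier
from pymc.backends.sqlite import load  # assumed module path
with hier.model:
    trace = load('mcmc.sqlite')        # assumed default file name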
8454e45f9964438bb1c3df3b398d32fe3c0f9d6c
|
block/migrations/0015_auto_20161024_1405.py
|
block/migrations/0015_auto_20161024_1405.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-24 13:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('block', '0014_auto_20161014_1630'),
]
operations = [
migrations.AlterField(
model_name='headerfooter',
name='footer_left',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the left side of the Footer below a heading '),
),
migrations.AlterField(
model_name='headerfooter',
name='footer_right',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the right side of the Footer below a heading '),
),
]
|
Make migrations for footer field type change
|
Make migrations for footer field type change
|
Python
|
apache-2.0
|
pkimber/block,pkimber/block,pkimber/block
|
Make migrations for footer field type change
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-24 13:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('block', '0014_auto_20161014_1630'),
]
operations = [
migrations.AlterField(
model_name='headerfooter',
name='footer_left',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the left side of the Footer below a heading '),
),
migrations.AlterField(
model_name='headerfooter',
name='footer_right',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the right side of the Footer below a heading '),
),
]
|
<commit_before><commit_msg>Make migrations for footer field type change<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-24 13:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('block', '0014_auto_20161014_1630'),
]
operations = [
migrations.AlterField(
model_name='headerfooter',
name='footer_left',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the left side of the Footer below a heading '),
),
migrations.AlterField(
model_name='headerfooter',
name='footer_right',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the right side of the Footer below a heading '),
),
]
|
Make migrations for footer field type change# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-24 13:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('block', '0014_auto_20161014_1630'),
]
operations = [
migrations.AlterField(
model_name='headerfooter',
name='footer_left',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the left side of the Footer below a heading '),
),
migrations.AlterField(
model_name='headerfooter',
name='footer_right',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the right side of the Footer below a heading '),
),
]
|
<commit_before><commit_msg>Make migrations for footer field type change<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-24 13:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('block', '0014_auto_20161014_1630'),
]
operations = [
migrations.AlterField(
model_name='headerfooter',
name='footer_left',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the left side of the Footer below a heading '),
),
migrations.AlterField(
model_name='headerfooter',
name='footer_right',
field=models.TextField(blank=True, help_text='A block of text intended to be shown on the right side of the Footer below a heading '),
),
]
|
|
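For context, a sketch of the models.py state that would generate this migration: the two footer fields declared as TextFields (HeaderFooter's other fields omitted; the prior field type is not shown by the migration itself):
from django.db import models
class HeaderFooter(models.Model):
    footer_left = models.TextField(
        blank=True,
        help_text='A block of text intended to be shown on the left side '
                  'of the Footer below a heading ',
    )
    footer_right = models.TextField(
        blank=True,
        help_text='A block of text intended to be shown on the right side '
                  'of the Footer below a heading ',
    )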
bb20687fb26daece194aa90f52ce87a87668d040
|
openkamer/management/commands/clean_dossiers.py
|
openkamer/management/commands/clean_dossiers.py
|
import logging
from django.core.management.base import BaseCommand
from document.models import Dossier
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
dossier_ids = Dossier.get_dossier_ids()
if len(dossier_ids) < 1500:
logger.error('Less than 1500 dossiers found, something wrong, abort!')
return
dossiers_to_delete = Dossier.objects.exclude(dossier_id__in=dossier_ids)
logger.info('Deleting ' + str(len(dossiers_to_delete)) + ' dossiers and related items')
dossiers_to_delete.delete()
|
Create command to clean non-wetsvoorstel dossiers
|
Create command to clean non-wetsvoorstel dossiers
|
Python
|
mit
|
openkamer/openkamer,openkamer/openkamer,openkamer/openkamer,openkamer/openkamer
|
Create command to clean non-wetsvoorstel dossiers
|
import logging
from django.core.management.base import BaseCommand
from document.models import Dossier
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
dossier_ids = Dossier.get_dossier_ids()
if len(dossier_ids) < 1500:
logger.error('Less than 1500 dossiers found, something wrong, abort!')
return
dossiers_to_delete = Dossier.objects.exclude(dossier_id__in=dossier_ids)
logger.info('Deleting ' + str(len(dossiers_to_delete)) + ' dossiers and related items')
dossiers_to_delete.delete()
|
<commit_before><commit_msg>Create command to clean non-wetsvoorstel dossiers<commit_after>
|
import logging
from django.core.management.base import BaseCommand
from document.models import Dossier
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
dossier_ids = Dossier.get_dossier_ids()
if len(dossier_ids) < 1500:
logger.error('Less than 1500 dossiers found, something wrong, abort!')
return
dossiers_to_delete = Dossier.objects.exclude(dossier_id__in=dossier_ids)
logger.info('Deleting ' + str(len(dossiers_to_delete)) + ' dossiers and related items')
dossiers_to_delete.delete()
|
Create command to clean non-wetsvoorstel dossiersimport logging
from django.core.management.base import BaseCommand
from document.models import Dossier
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
dossier_ids = Dossier.get_dossier_ids()
if len(dossier_ids) < 1500:
logger.error('Less than 1500 dossiers found, something wrong, abort!')
return
dossiers_to_delete = Dossier.objects.exclude(dossier_id__in=dossier_ids)
logger.info('Deleting ' + str(len(dossiers_to_delete)) + ' dossiers and related items')
dossiers_to_delete.delete()
|
<commit_before><commit_msg>Create command to clean non-wetsvoorstel dossiers<commit_after>import logging
from django.core.management.base import BaseCommand
from document.models import Dossier
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
dossier_ids = Dossier.get_dossier_ids()
if len(dossier_ids) < 1500:
logger.error('Less than 1500 dossiers found, something wrong, abort!')
return
dossiers_to_delete = Dossier.objects.exclude(dossier_id__in=dossier_ids)
logger.info('Deleting ' + str(len(dossiers_to_delete)) + ' dossiers and related items')
dossiers_to_delete.delete()
|
|
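The command registers under its file name, so invocation follows the standard Django pattern (shown here from Python; the shell equivalent would be python manage.py clean_dossiers):
from django.core.management import call_command
call_command('clean_dossiers')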
5673ced05395c08292df76ceebb4b4b8b876d0a9
|
markers/mark_error_2.py
|
markers/mark_error_2.py
|
"""Marks all fixed errors #2 on ruwiki's CheckWikipedia."""
import re
import pywikibot
from checkwiki import load_page_list, mark_error_done, log
NUMBER = "2"
REGEXP = r"""
<\s*/?\s*abbr\s*/\s*>|
<\s*/?\s*b\s*/\s*>|
<\s*/?\s*big\s*/\s*>|
<\s*/?\s*blockquote\s*/\s*>|
<\s*/?\s*center\s*/\s*>|
<\s*/?\s*cite\s*/\s*>|
<\s*/?\s*del\s*/\s*>|
<\s*/?\s*div\s*/\s*>|
<\s*/?\s*em\s*/\s*>|
<\s*/?\s*font\s*/\s*>|
<\s*/?\s*i\s*/\s*>|
<\s*/?\s*p\s*/\s*>|
<\s*/?\s*s\s*/\s*>|
<\s*/?\s*small\s*/\s*>|
<\s*/?\s*span\s*/\s*>|
<\s*/?\s*strike\s*/\s*>|
<\s*/?\s*sub\s*/\s*>|
<\s*/?\s*sup\s*/\s*>|
<\s*/?\s*td\s*/\s*>|
<\s*/?\s*th\s*/\s*>|
<\s*/?\s*tr\s*/\s*>|
<\s*/?\s*tt\s*/\s*>|
<\s*/?\s*u\s*/\s*>|
<br\s*/\s*[^ ]>|
<br[^ ]/>|
<br[^ /]>|
<br\s*/\s*[^ >]|
<br\s*[^ >/]|
<[^ w]br[^/]*\s*>|
</hr>|
<ref><cite>
"""
FLAGS = re.I | re.VERBOSE
def main():
"""Downloads list from server and marks relevant errors as done."""
site = pywikibot.Site()
for line in load_page_list(NUMBER):
page = pywikibot.Page(site, line)
if re.search(REGEXP, page.text, flags=FLAGS) is None:
mark_error_done(NUMBER, page.title())
log(line, success=True)
else:
log(line, success=False)
if __name__ == "__main__":
main()
|
Add marker for 2nd error
|
Add marker for 2nd error
|
Python
|
mit
|
Facenapalm/NapalmBot
|
Add marker for 2nd error
|
"""Marks all fixed errors #2 on ruwiki's CheckWikipedia."""
import re
import pywikibot
from checkwiki import load_page_list, mark_error_done, log
NUMBER = "2"
REGEXP = r"""
<\s*/?\s*abbr\s*/\s*>|
<\s*/?\s*b\s*/\s*>|
<\s*/?\s*big\s*/\s*>|
<\s*/?\s*blockquote\s*/\s*>|
<\s*/?\s*center\s*/\s*>|
<\s*/?\s*cite\s*/\s*>|
<\s*/?\s*del\s*/\s*>|
<\s*/?\s*div\s*/\s*>|
<\s*/?\s*em\s*/\s*>|
<\s*/?\s*font\s*/\s*>|
<\s*/?\s*i\s*/\s*>|
<\s*/?\s*p\s*/\s*>|
<\s*/?\s*s\s*/\s*>|
<\s*/?\s*small\s*/\s*>|
<\s*/?\s*span\s*/\s*>|
<\s*/?\s*strike\s*/\s*>|
<\s*/?\s*sub\s*/\s*>|
<\s*/?\s*sup\s*/\s*>|
<\s*/?\s*td\s*/\s*>|
<\s*/?\s*th\s*/\s*>|
<\s*/?\s*tr\s*/\s*>|
<\s*/?\s*tt\s*/\s*>|
<\s*/?\s*u\s*/\s*>|
<br\s*/\s*[^ ]>|
<br[^ ]/>|
<br[^ /]>|
<br\s*/\s*[^ >]|
<br\s*[^ >/]|
<[^ w]br[^/]*\s*>|
</hr>|
<ref><cite>
"""
FLAGS = re.I | re.VERBOSE
def main():
"""Downloads list from server and marks relevant errors as done."""
site = pywikibot.Site()
for line in load_page_list(NUMBER):
page = pywikibot.Page(site, line)
if re.search(REGEXP, page.text, flags=FLAGS) is None:
mark_error_done(NUMBER, page.title())
log(line, success=True)
else:
log(line, success=False)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add marker for 2nd error<commit_after>
|
"""Marks all fixed errors #2 on ruwiki's CheckWikipedia."""
import re
import pywikibot
from checkwiki import load_page_list, mark_error_done, log
NUMBER = "2"
REGEXP = r"""
<\s*/?\s*abbr\s*/\s*>|
<\s*/?\s*b\s*/\s*>|
<\s*/?\s*big\s*/\s*>|
<\s*/?\s*blockquote\s*/\s*>|
<\s*/?\s*center\s*/\s*>|
<\s*/?\s*cite\s*/\s*>|
<\s*/?\s*del\s*/\s*>|
<\s*/?\s*div\s*/\s*>|
<\s*/?\s*em\s*/\s*>|
<\s*/?\s*font\s*/\s*>|
<\s*/?\s*i\s*/\s*>|
<\s*/?\s*p\s*/\s*>|
<\s*/?\s*s\s*/\s*>|
<\s*/?\s*small\s*/\s*>|
<\s*/?\s*span\s*/\s*>|
<\s*/?\s*strike\s*/\s*>|
<\s*/?\s*sub\s*/\s*>|
<\s*/?\s*sup\s*/\s*>|
<\s*/?\s*td\s*/\s*>|
<\s*/?\s*th\s*/\s*>|
<\s*/?\s*tr\s*/\s*>|
<\s*/?\s*tt\s*/\s*>|
<\s*/?\s*u\s*/\s*>|
<br\s*/\s*[^ ]>|
<br[^ ]/>|
<br[^ /]>|
<br\s*/\s*[^ >]|
<br\s*[^ >/]|
<[^ w]br[^/]*\s*>|
</hr>|
<ref><cite>
"""
FLAGS = re.I | re.VERBOSE
def main():
"""Downloads list from server and marks relevant errors as done."""
site = pywikibot.Site()
for line in load_page_list(NUMBER):
page = pywikibot.Page(site, line)
if re.search(REGEXP, page.text, flags=FLAGS) is None:
mark_error_done(NUMBER, page.title())
log(line, success=True)
else:
log(line, success=False)
if __name__ == "__main__":
main()
|
Add marker for 2nd error"""Marks all fixed errors #2 on ruwiki's CheckWikipedia."""
import re
import pywikibot
from checkwiki import load_page_list, mark_error_done, log
NUMBER = "2"
REGEXP = r"""
<\s*/?\s*abbr\s*/\s*>|
<\s*/?\s*b\s*/\s*>|
<\s*/?\s*big\s*/\s*>|
<\s*/?\s*blockquote\s*/\s*>|
<\s*/?\s*center\s*/\s*>|
<\s*/?\s*cite\s*/\s*>|
<\s*/?\s*del\s*/\s*>|
<\s*/?\s*div\s*/\s*>|
<\s*/?\s*em\s*/\s*>|
<\s*/?\s*font\s*/\s*>|
<\s*/?\s*i\s*/\s*>|
<\s*/?\s*p\s*/\s*>|
<\s*/?\s*s\s*/\s*>|
<\s*/?\s*small\s*/\s*>|
<\s*/?\s*span\s*/\s*>|
<\s*/?\s*strike\s*/\s*>|
<\s*/?\s*sub\s*/\s*>|
<\s*/?\s*sup\s*/\s*>|
<\s*/?\s*td\s*/\s*>|
<\s*/?\s*th\s*/\s*>|
<\s*/?\s*tr\s*/\s*>|
<\s*/?\s*tt\s*/\s*>|
<\s*/?\s*u\s*/\s*>|
<br\s*/\s*[^ ]>|
<br[^ ]/>|
<br[^ /]>|
<br\s*/\s*[^ >]|
<br\s*[^ >/]|
<[^ w]br[^/]*\s*>|
</hr>|
<ref><cite>
"""
FLAGS = re.I | re.VERBOSE
def main():
"""Downloads list from server and marks relevant errors as done."""
site = pywikibot.Site()
for line in load_page_list(NUMBER):
page = pywikibot.Page(site, line)
if re.search(REGEXP, page.text, flags=FLAGS) is None:
mark_error_done(NUMBER, page.title())
log(line, success=True)
else:
log(line, success=False)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add marker for 2nd error<commit_after>"""Marks all fixed errors #2 on ruwiki's CheckWikipedia."""
import re
import pywikibot
from checkwiki import load_page_list, mark_error_done, log
NUMBER = "2"
REGEXP = r"""
<\s*/?\s*abbr\s*/\s*>|
<\s*/?\s*b\s*/\s*>|
<\s*/?\s*big\s*/\s*>|
<\s*/?\s*blockquote\s*/\s*>|
<\s*/?\s*center\s*/\s*>|
<\s*/?\s*cite\s*/\s*>|
<\s*/?\s*del\s*/\s*>|
<\s*/?\s*div\s*/\s*>|
<\s*/?\s*em\s*/\s*>|
<\s*/?\s*font\s*/\s*>|
<\s*/?\s*i\s*/\s*>|
<\s*/?\s*p\s*/\s*>|
<\s*/?\s*s\s*/\s*>|
<\s*/?\s*small\s*/\s*>|
<\s*/?\s*span\s*/\s*>|
<\s*/?\s*strike\s*/\s*>|
<\s*/?\s*sub\s*/\s*>|
<\s*/?\s*sup\s*/\s*>|
<\s*/?\s*td\s*/\s*>|
<\s*/?\s*th\s*/\s*>|
<\s*/?\s*tr\s*/\s*>|
<\s*/?\s*tt\s*/\s*>|
<\s*/?\s*u\s*/\s*>|
<br\s*/\s*[^ ]>|
<br[^ ]/>|
<br[^ /]>|
<br\s*/\s*[^ >]|
<br\s*[^ >/]|
<[^ w]br[^/]*\s*>|
</hr>|
<ref><cite>
"""
FLAGS = re.I | re.VERBOSE
def main():
"""Downloads list from server and marks relevant errors as done."""
site = pywikibot.Site()
for line in load_page_list(NUMBER):
page = pywikibot.Page(site, line)
if re.search(REGEXP, page.text, flags=FLAGS) is None:
mark_error_done(NUMBER, page.title())
log(line, success=True)
else:
log(line, success=False)
if __name__ == "__main__":
main()
|
|
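A quick sanity check of the verbose pattern, assuming REGEXP and FLAGS are importable from the module above: a malformed <br.> should match, a well-formed <br /> should not:
import re
from markers.mark_error_2 import REGEXP, FLAGS  # assumed import path
assert re.search(REGEXP, "text <br.> text", flags=FLAGS)
assert re.search(REGEXP, "text <br /> text", flags=FLAGS) is None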
15f77deefdd76ec31dd012fe404b85378eb6d785
|
feature-reduction.py
|
feature-reduction.py
|
# Reduce the dimension of the feature vector.
# This script works with tf-idf.csv (produced by tf-idf.py),
# where each document is represented with 5 most important features.
# The task of this script:
# leave only the features that are mentioned at least once.
# Renumber them as 1, 2, 3, 4 etc.
INPUT_FILE = 'tf-idf.csv'
def parse(document):
""" Parse the document string to get the data.
doc = "1,3,17 5:8.03 6:6.33 79:4.47"
=> returns ("1,3,17", [5, 6, 79], [8.03, 6.33, 4.47])
"""
raw_parse = document.split()
classes = raw_parse[0]
features_and_values = [item.split(':') for item in raw_parse[1:]]
features = [int(item[0]) for item in features_and_values]
values = [float(item[1]) for item in features_and_values]
return classes, features, values
def compose(classes, features, values):
""" Opposite to parse: Compose the document string knowing the data.
classes = "1,3,17"; features = [1, 2, 3]
values = [8.03, 6.33, 4.47]
=> returns "1,3,17 1:8.03 2:6.33 3:4.47"
"""
features_and_values = ' '.join([str(features[i]) + ":" + str(values[i]) for i in range(len(features))])
return ' '.join([classes, features_and_values])
# Count unique features
# with open(INPUT_FILE, 'r') as input_file:
# for document in input_file:
# classes, features, values = parse(document)
|
Add utility functions to parse and to compose the document
|
Add utility functions to parse and to compose the document
|
Python
|
mit
|
artem-erofeev/kaggle-lshtc,artem-erofeev/kaggle-lshtc
|
Add utility functions to parse and to compose the document
|
# Reduce the dimension of the feature vector.
# This script works with tf-idf.csv (produced by tf-idf.py),
# where each document is represented with 5 most important features.
# The task of this script:
# leave only the features that are mentioned at least once.
# Renumber them as 1, 2, 3, 4 etc.
INPUT_FILE = 'tf-idf.csv'
def parse(document):
""" Parse the document string to get the data.
doc = "1,3,17 5:8.03 6:6.33 79:4.47"
=> returns ("1,3,17", [5, 6, 79], [8.03, 6.33, 4.47])
"""
raw_parse = document.split()
classes = raw_parse[0]
features_and_values = [item.split(':') for item in raw_parse[1:]]
features = [int(item[0]) for item in features_and_values]
values = [float(item[1]) for item in features_and_values]
return classes, features, values
def compose(classes, features, values):
""" Opposite to parse: Compose the document string knowing the data.
classes = "1,3,17"; features = [1, 2, 3]
values = [8.03, 6.33, 4.47]
=> returns "1,3,17 1:8.03 2:6.33 3:4.47"
"""
features_and_values = ' '.join([str(features[i]) + ":" + str(values[i]) for i in range(len(features))])
return ' '.join([classes, features_and_values])
# Count unique features
# with open(INPUT_FILE, 'r') as input_file:
# for document in input_file:
# classes, features, values = parse(document)
|
<commit_before><commit_msg>Add utility functions to parse and to compose the document<commit_after>
|
# Reduce the dimension of the feature vector.
# This script works with tf-idf.csv (produced by tf-idf.py),
# where each document is represented with 5 most important features.
# The task of this script:
# leave only the features that are mentioned at least once.
# Renumber them as 1, 2, 3, 4 etc.
INPUT_FILE = 'tf-idf.csv'
def parse(document):
""" Parse the document string to get the data.
doc = "1,3,17 5:8.03 6:6.33 79:4.47"
=> returns ("1,3,17", [5, 6, 79], [8.03, 6.33, 4.47])
"""
raw_parse = document.split()
classes = raw_parse[0]
features_and_values = [item.split(':') for item in raw_parse[1:]]
features = [int(item[0]) for item in features_and_values]
values = [float(item[1]) for item in features_and_values]
return classes, features, values
def compose(classes, features, values):
""" Opposite to parse: Compose the document string knowing the data.
classes = "1,3,17"; features = [1, 2, 3]
values = [8.03, 6.33, 4.47]
=> returns "1,3,17 1:8.03 2:6.33 3:4.47"
"""
features_and_values = ' '.join([str(features[i]) + ":" + str(values[i]) for i in range(len(features))])
return ' '.join([classes, features_and_values])
# Count unique features
# with open(INPUT_FILE, 'r') as input_file:
# for document in input_file:
# classes, features, values = parse(document)
|
Add utility functions to parse and to compose the document# Reduce the dimension of the feature vector.
# This script works with tf-idf.csv (produced by tf-idf.py),
# where each document is represented with 5 most important features.
# The task of this script:
# leave only the features that are mentioned at least once.
# Renumber them as 1, 2, 3, 4 etc.
INPUT_FILE = 'tf-idf.csv'
def parse(document):
""" Parse the document string to get the data.
doc = "1,3,17 5:8.03 6:6.33 79:4.47"
=> returns ("1,3,17", [5, 6, 79], [8.03, 6.33, 4.47])
"""
raw_parse = document.split()
classes = raw_parse[0]
features_and_values = [item.split(':') for item in raw_parse[1:]]
features = [int(item[0]) for item in features_and_values]
values = [float(item[1]) for item in features_and_values]
return classes, features, values
def compose(classes, features, values):
""" Opposite to parse: Compose the document string knowing the data.
classes = "1,3,17"; features = [1, 2, 3]
values = [8.03, 6.33, 4.47]
=> returns "1,3,17 1:8.03 2:6.33 3:4.47"
"""
features_and_values = ' '.join([str(features[i]) + ":" + str(values[i]) for i in range(len(features))])
return ' '.join([classes, features_and_values])
# Count unique features
# with open(INPUT_FILE, 'r') as input_file:
# for document in input_file:
# classes, features, values = parse(document)
|
<commit_before><commit_msg>Add utility functions to parse and to compose the document<commit_after># Reduce the dimension of the feature vector.
# This script works with tf-idf.csv (produced by tf-idf.py),
# where each document is represented with 5 most important features.
# The task of this script:
# leave only the features that are mentioned at least once.
# Renumber them as 1, 2, 3, 4 etc.
INPUT_FILE = 'tf-idf.csv'
def parse(document):
""" Parse the document string to get the data.
doc = "1,3,17 5:8.03 6:6.33 79:4.47"
=> returns ("1,3,17", [5, 6, 79], [8.03, 6.33, 4.47])
"""
raw_parse = document.split()
classes = raw_parse[0]
features_and_values = [item.split(':') for item in raw_parse[1:]]
features = [int(item[0]) for item in features_and_values]
values = [float(item[1]) for item in features_and_values]
return classes, features, values
def compose(classes, features, values):
""" Opposite to parse: Compose the document string knowing the data.
classes = "1,3,17"; features = [1, 2, 3]
values = [8.03, 6.33, 4.47]
=> returns "1,3,17 1:8.03 2:6.33 3:4.47"
"""
features_and_values = ' '.join([str(features[i]) + ":" + str(values[i]) for i in range(len(features))])
return ' '.join([classes, features_and_values])
# Count unique features
# with open(INPUT_FILE, 'r') as input_file:
# for document in input_file:
# classes, features, values = parse(document)
|
|
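The driver loop in the record above is left commented out. A minimal sketch of how the renumbering task could be completed, reusing parse and compose from the script; OUTPUT_FILE and the two-pass structure are assumptions, not part of the original commit:

# First pass: collect every feature id that actually occurs.
used = set()
with open(INPUT_FILE, 'r') as input_file:
    for document in input_file:
        _, features, _ = parse(document)
        used.update(features)

# Map each surviving feature id to 1, 2, 3, ... in ascending order.
renumber = {old: new for new, old in enumerate(sorted(used), start=1)}

OUTPUT_FILE = 'tf-idf-reduced.csv'  # hypothetical output path
with open(INPUT_FILE, 'r') as input_file, open(OUTPUT_FILE, 'w') as output_file:
    for document in input_file:
        classes, features, values = parse(document)
        output_file.write(compose(classes, [renumber[f] for f in features], values) + '\n')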
3a074a43f6e979854de3e2f80dfc5dd733b7d64c
|
double_link_list.py
|
double_link_list.py
|
from __future__ import unicode_literals
class Node(object):
def __init__(self, val, prev=None, next_=None):
self.val = val
self.prev = prev
self.next = next_
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
class DoubleLinkList(object):
"""Class for a doubly-linked list."""
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
"""Print representation of DoubleLinkList."""
node = self.head
output = ""
for node in self:
output += "{!r}, ".format(node.val)
return "({})".format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self, val):
"""Insert value at head of DoubleLinkList.
args:
val: the value to add
"""
current_head = self.head
self.head = Node(val, prev=None, next_=current_head)
        if current_head is not None:
            current_head.prev = self.head
self.length += 1
return None
def pop(self):
"""Pop the first val off the head and return it."""
if self.head is None:
raise IndexError
else:
to_return = self.head
self.head = to_return.next
            if self.head is not None:
                self.head.prev = None
self.length -= 1
return to_return.val
def size(self):
"""Return current length of DoubleLinkList."""
return len(self)
def search(self, search_val):
"""Return the node containing val if present, else None.
args:
search_val: the value to search by
returns: a node object or None
"""
for node in self:
if node.val == search_val:
return node
        return None
def remove(self, search_node):
"""Remove given node from list, return None.
args:
search_node: the node to be removed
"""
for node in self:
if node == search_node:
                if node.prev is not None:
                    node.prev.next = node.next
                else:
                    self.head = node.next
                if node.next is not None:
                    node.next.prev = node.prev
                self.length -= 1
return None
def display(self):
"""Shows representation of DoubleLinkList."""
return repr(self)
|
Add first pass for double link list
|
Add first pass for double link list
|
Python
|
mit
|
jonathanstallings/data-structures,jay-tyler/data-structures
|
Add first pass for double link list
|
from __future__ import unicode_literals
class Node(object):
def __init__(self, val, prev=None, next_=None):
self.val = val
self.prev = prev
self.next = next_
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
class DoubleLinkList(object):
"""Class for a doubly-linked list."""
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
"""Print representation of DoubleLinkList."""
node = self.head
output = ""
for node in self:
output += "{!r}, ".format(node.val)
return "({})".format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self, val):
"""Insert value at head of DoubleLinkList.
args:
val: the value to add
"""
current_head = self.head
self.head = Node(val, prev=None, next_=current_head)
        if current_head is not None:
            current_head.prev = self.head
self.length += 1
return None
def pop(self):
"""Pop the first val off the head and return it."""
if self.head is None:
raise IndexError
else:
to_return = self.head
self.head = to_return.next
            if self.head is not None:
                self.head.prev = None
self.length -= 1
return to_return.val
def size(self):
"""Return current length of DoubleLinkList."""
return len(self)
def search(self, search_val):
"""Return the node containing val if present, else None.
args:
search_val: the value to search by
returns: a node object or None
"""
for node in self:
if node.val == search_val:
return node
        return None
def remove(self, search_node):
"""Remove given node from list, return None.
args:
search_node: the node to be removed
"""
for node in self:
if node == search_node:
                if node.prev is not None:
                    node.prev.next = node.next
                else:
                    self.head = node.next
                if node.next is not None:
                    node.next.prev = node.prev
                self.length -= 1
return None
def display(self):
"""Shows representation of DoubleLinkList."""
return repr(self)
|
<commit_before><commit_msg>Add first pass for double link list<commit_after>
|
from __future__ import unicode_literals
class Node(object):
def __init__(self, val, prev=None, next_=None):
self.val = val
self.prev = prev
self.next = next_
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
class DoubleLinkList(object):
"""Class for a doubly-linked list."""
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
"""Print representation of DoubleLinkList."""
node = self.head
output = ""
for node in self:
output += "{!r}, ".format(node.val)
return "({})".format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self, val):
"""Insert value at head of DoubleLinkList.
args:
val: the value to add
"""
current_head = self.head
self.head = Node(val, prev=None, next_=current_head)
        if current_head is not None:
            current_head.prev = self.head
self.length += 1
return None
def pop(self):
"""Pop the first val off the head and return it."""
if self.head is None:
raise IndexError
else:
to_return = self.head
self.head = to_return.next
            if self.head is not None:
                self.head.prev = None
self.length -= 1
return to_return.val
def size(self):
"""Return current length of DoubleLinkList."""
return len(self)
def search(self, search_val):
"""Return the node containing val if present, else None.
args:
search_val: the value to search by
returns: a node object or None
"""
for node in self:
if node.val == search_val:
return node
        return None
def remove(self, search_node):
"""Remove given node from list, return None.
args:
search_node: the node to be removed
"""
for node in self:
if node == search_node:
                if node.prev is not None:
                    node.prev.next = node.next
                else:
                    self.head = node.next
                if node.next is not None:
                    node.next.prev = node.prev
                self.length -= 1
return None
def display(self):
"""Shows representation of DoubleLinkList."""
return repr(self)
|
Add first pass for double link listfrom __future__ import unicode_literals
class Node(object):
def __init__(self, val, prev=None, next_=None):
self.val = val
self.prev = prev
self.next = next_
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
class DoubleLinkList(object):
"""Class for a doubly-linked list."""
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
"""Print representation of DoubleLinkList."""
node = self.head
output = ""
for node in self:
output += "{!r}, ".format(node.val)
return "({})".format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self, val):
"""Insert value at head of DoubleLinkList.
args:
val: the value to add
"""
current_head = self.head
self.head = Node(val, prev=None, next_=current_head)
        if current_head is not None:
            current_head.prev = self.head
self.length += 1
return None
def pop(self):
"""Pop the first val off the head and return it."""
if self.head is None:
raise IndexError
else:
to_return = self.head
self.head = to_return.next
            if self.head is not None:
                self.head.prev = None
self.length -= 1
return to_return.val
def size(self):
"""Return current length of DoubleLinkList."""
return len(self)
def search(self, search_val):
"""Return the node containing val if present, else None.
args:
search_val: the value to search by
returns: a node object or None
"""
for node in self:
if node.val == search_val:
return node
        return None
def remove(self, search_node):
"""Remove given node from list, return None.
args:
search_node: the node to be removed
"""
for node in self:
if node == search_node:
                if node.prev is not None:
                    node.prev.next = node.next
                else:
                    self.head = node.next
                if node.next is not None:
                    node.next.prev = node.prev
                self.length -= 1
return None
def display(self):
"""Shows representation of DoubleLinkList."""
return repr(self)
|
<commit_before><commit_msg>Add first pass for double link list<commit_after>from __future__ import unicode_literals
class Node(object):
def __init__(self, val, prev=None, next_=None):
self.val = val
self.prev = prev
self.next = next_
def __repr__(self):
"""Print representation of node."""
return "{val}".format(val=self.val)
class DoubleLinkList(object):
"""Class for a doubly-linked list."""
def __init__(self, iterable=()):
self._current = None
self.head = None
self.length = 0
for val in reversed(iterable):
self.insert(val)
def __repr__(self):
"""Print representation of DoubleLinkList."""
node = self.head
output = ""
for node in self:
output += "{!r}, ".format(node.val)
return "({})".format(output.rstrip(' ,'))
def __len__(self):
return self.length
def __iter__(self):
if self.head is not None:
self._current = self.head
return self
def next(self):
if self._current is None:
raise StopIteration
node = self._current
self._current = self._current.next
return node
def insert(self, val):
"""Insert value at head of DoubleLinkList.
args:
val: the value to add
"""
current_head = self.head
self.head = Node(val, prev=None, next_=current_head)
        if current_head is not None:
            current_head.prev = self.head
self.length += 1
return None
def pop(self):
"""Pop the first val off the head and return it."""
if self.head is None:
raise IndexError
else:
to_return = self.head
self.head = to_return.next
            if self.head is not None:
                self.head.prev = None
self.length -= 1
return to_return.val
def size(self):
"""Return current length of DoubleLinkList."""
return len(self)
def search(self, search_val):
"""Return the node containing val if present, else None.
args:
search_val: the value to search by
returns: a node object or None
"""
for node in self:
if node.val == search_val:
return node
        return None
def remove(self, search_node):
"""Remove given node from list, return None.
args:
search_node: the node to be removed
"""
for node in self:
if node == search_node:
                if node.prev is not None:
                    node.prev.next = node.next
                else:
                    self.head = node.next
                if node.next is not None:
                    node.next.prev = node.prev
                self.length -= 1
return None
def display(self):
"""Shows representation of DoubleLinkList."""
return repr(self)
|
|
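A short usage sketch for the list above (illustrative, not part of the commit); it runs under Python 2, which the module targets, and the expected output assumes the guard fixes applied to insert, search and remove:

dll = DoubleLinkList([1, 2, 3])  # constructor inserts in reverse, so order is kept
print(dll)                       # (1, 2, 3)
print(dll.pop())                 # 1
node = dll.search(3)
dll.remove(node)
print(dll.display())             # (2)
print(dll.size())                # 1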
c37ae9178f2f6616b6d03577d6e8129a5bb371fd
|
src/install_services.py
|
src/install_services.py
|
#!/usr/bin/env python3
import os
import socket
def install_pyjob_run():
print('installing pyjob_run')
os.system('stop pyjob_run 2>&1 > /dev/null')
os.system('cp -f ./pyjob_run.conf /etc/init/')
os.system('start pyjob_run 2>&1 > /dev/null &')
def install_pyjob_server():
print('installing pyjob_server')
os.system('stop pyjob_server 2>&1 > /dev/null')
os.system('cp -f ./pyjob_server.conf /etc/init/')
os.system('start pyjob_server 2>&1 > /dev/null &')
install_pyjob_run()
if socket.gethostname() == "lnls82-linux":
install_pyjob_server()
|
Add python script that installs services
|
Add python script that installs services
|
Python
|
mit
|
lnls-fac/job_manager
|
Add python script that installs services
|
#!/usr/bin/env python3
import os
import socket
def install_pyjob_run():
print('installing pyjob_run')
os.system('stop pyjob_run 2>&1 > /dev/null')
os.system('cp -f ./pyjob_run.conf /etc/init/')
os.system('start pyjob_run 2>&1 > /dev/null &')
def install_pyjob_server():
print('installing pyjob_server')
os.system('stop pyjob_server 2>&1 > /dev/null')
os.system('cp -f ./pyjob_server.conf /etc/init/')
os.system('start pyjob_server 2>&1 > /dev/null &')
install_pyjob_run()
if socket.gethostname() == "lnls82-linux":
install_pyjob_server()
|
<commit_before><commit_msg>Add python script that installs services<commit_after>
|
#!/usr/bin/env python3
import os
import socket
def install_pyjob_run():
print('installing pyjob_run')
os.system('stop pyjob_run 2>&1 > /dev/null')
os.system('cp -f ./pyjob_run.conf /etc/init/')
os.system('start pyjob_run 2>&1 > /dev/null &')
def install_pyjob_server():
print('installing pyjob_server')
os.system('stop pyjob_server 2>&1 > /dev/null')
os.system('cp -f ./pyjob_server.conf /etc/init/')
os.system('start pyjob_server 2>&1 > /dev/null &')
install_pyjob_run()
if socket.gethostname() == "lnls82-linux":
install_pyjob_server()
|
Add python script that installs services#!/usr/bin/env python3
import os
import socket
def install_pyjob_run():
print('installing pyjob_run')
os.system('stop pyjob_run 2>&1 > /dev/null')
os.system('cp -f ./pyjob_run.conf /etc/init/')
os.system('start pyjob_run 2>&1 > /dev/null &')
def install_pyjob_server():
print('installing pyjob_server')
os.system('stop pyjob_server 2>&1 > /dev/null')
os.system('cp -f ./pyjob_server.conf /etc/init/')
os.system('start pyjob_server 2>&1 > /dev/null &')
install_pyjob_run()
if socket.gethostname() == "lnls82-linux":
install_pyjob_server()
|
<commit_before><commit_msg>Add python script that installs services<commit_after>#!/usr/bin/env python3
import os
import socket
def install_pyjob_run():
print('installing pyjob_run')
os.system('stop pyjob_run 2>&1 > /dev/null')
os.system('cp -f ./pyjob_run.conf /etc/init/')
os.system('start pyjob_run 2>&1 > /dev/null &')
def install_pyjob_server():
print('installing pyjob_server')
os.system('stop pyjob_server 2>&1 > /dev/null')
os.system('cp -f ./pyjob_server.conf /etc/init/')
os.system('start pyjob_server 2>&1 > /dev/null &')
install_pyjob_run()
if socket.gethostname() == "lnls82-linux":
install_pyjob_server()
|
|
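Two caveats in the installer above: os.system ignores exit codes, and the redirection 2>&1 > /dev/null sends stderr to the terminal rather than to /dev/null (the fully silenced form is > /dev/null 2>&1). A hedged sketch of the same steps using subprocess; the job names and the /etc/init path are taken from the script:

import subprocess

def install(job_name):
    # 'stop' legitimately fails when the job is not running, so its exit code is ignored
    subprocess.call(['stop', job_name],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    if subprocess.call(['cp', '-f', './' + job_name + '.conf', '/etc/init/']) != 0:
        raise RuntimeError('failed to install ' + job_name + '.conf')
    subprocess.call(['start', job_name],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

install('pyjob_run')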
76f79ddf1d796306551d316d032ab4e4075e6647
|
localore/home/migrations/0016_homepage_site_intro.py
|
localore/home/migrations/0016_homepage_site_intro.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0015_auto_20160402_1209'),
]
operations = [
migrations.AddField(
model_name='homepage',
name='site_intro',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
]
|
Add missed migration for daaab63
|
Add missed migration for daaab63
|
Python
|
mpl-2.0
|
ghostwords/localore,ghostwords/localore,ghostwords/localore
|
Add missed migration for daaab63
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0015_auto_20160402_1209'),
]
operations = [
migrations.AddField(
model_name='homepage',
name='site_intro',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
]
|
<commit_before><commit_msg>Add missed migration for daaab63<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0015_auto_20160402_1209'),
]
operations = [
migrations.AddField(
model_name='homepage',
name='site_intro',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
]
|
Add missed migration for daaab63# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0015_auto_20160402_1209'),
]
operations = [
migrations.AddField(
model_name='homepage',
name='site_intro',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
]
|
<commit_before><commit_msg>Add missed migration for daaab63<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0015_auto_20160402_1209'),
]
operations = [
migrations.AddField(
model_name='homepage',
name='site_intro',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
]
|
|
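For context, the model change this migration records presumably looks like the following; only the field name and type are taken from the migration itself, the rest is a hypothetical reconstruction:

from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailcore.models import Page

class HomePage(Page):
    site_intro = RichTextField(blank=True)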
b4cb2758b76633856e2fd701a9469447b75192fc
|
lowfat/migrations/0120_auto_20180206_1505.py
|
lowfat/migrations/0120_auto_20180206_1505.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-06 15:05
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0119_auto_20171214_0722'),
]
operations = [
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
]
|
Add migration for default year
|
Add migration for default year
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
Add migration for default year
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-06 15:05
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0119_auto_20171214_0722'),
]
operations = [
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
]
|
<commit_before><commit_msg>Add migration for default year<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-06 15:05
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0119_auto_20171214_0722'),
]
operations = [
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
]
|
Add migration for default year# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-06 15:05
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0119_auto_20171214_0722'),
]
operations = [
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
]
|
<commit_before><commit_msg>Add migration for default year<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-06 15:05
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0119_auto_20171214_0722'),
]
operations = [
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
]
|
|
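Because the 2018 and date(2020, 3, 31) defaults are literals baked into the model, a migration like this one has to be regenerated every year. A common alternative, sketched below with hypothetical helper names, is to use callables, which Django serializes by reference so changing years never alters migration state:

import datetime
from django.db import models

def current_application_year():
    return datetime.date.today().year

def default_grant_expiration():
    # illustrative policy only; the real expiration rule is not in the migration
    return datetime.date(datetime.date.today().year + 2, 3, 31)

class Claimant(models.Model):
    application_year = models.IntegerField(default=current_application_year)
    inauguration_grant_expiration = models.DateField(default=default_grant_expiration)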
f5594907ed2b99c88ad0cb6bf66d1f2c46770e78
|
eventkit_cloud/jobs/migrations/add_permissions_to_existing_jobs.py
|
eventkit_cloud/jobs/migrations/add_permissions_to_existing_jobs.py
|
# https://docs.djangoproject.com/en/dev/howto/writing-migrations/#migrations-that-add-unique-fields
from django.db import migrations
from django.contrib.auth.models import Group, User
from ..models import Job
def add_permissions(apps, schema_editor):
JobPermission = apps.get_model("core", "JobPermission")
for job in Job.objects.all():
jp = JobPermission.objects.create(job=job, content_object=job.user,
permission=JobPermission.Permissions.ADMIN.value)
jp.save()
class Migration(migrations.Migration):
dependencies = [
('jobs', '0027_merge_20180417_1210'),
]
operations = [
# Existing jobs are assigned visibility=PRIVATE. Add admin permission for the owner
migrations.RunPython(add_permissions,reverse_code=migrations.RunPython.noop),
]
|
Add permissions for existing jobs via migration
|
Add permissions for existing jobs via migration
|
Python
|
bsd-3-clause
|
venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,terranodo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud
|
Add permissions for existing jobs via migration
|
# https://docs.djangoproject.com/en/dev/howto/writing-migrations/#migrations-that-add-unique-fields
from django.db import migrations
from django.contrib.auth.models import Group, User
from ..models import Job
def add_permissions(apps, schema_editor):
JobPermission = apps.get_model("core", "JobPermission")
for job in Job.objects.all():
jp = JobPermission.objects.create(job=job, content_object=job.user,
permission=JobPermission.Permissions.ADMIN.value)
jp.save()
class Migration(migrations.Migration):
dependencies = [
('jobs', '0027_merge_20180417_1210'),
]
operations = [
# Existing jobs are assigned visibility=PRIVATE. Add admin permission for the owner
migrations.RunPython(add_permissions,reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add permissions for existing jobs via migration<commit_after>
|
# https://docs.djangoproject.com/en/dev/howto/writing-migrations/#migrations-that-add-unique-fields
from django.db import migrations
from django.contrib.auth.models import Group, User
from ..models import Job
def add_permissions(apps, schema_editor):
JobPermission = apps.get_model("core", "JobPermission")
for job in Job.objects.all():
jp = JobPermission.objects.create(job=job, content_object=job.user,
permission=JobPermission.Permissions.ADMIN.value)
jp.save()
class Migration(migrations.Migration):
dependencies = [
('jobs', '0027_merge_20180417_1210'),
]
operations = [
# Existing jobs are assigned visibility=PRIVATE. Add admin permission for the owner
migrations.RunPython(add_permissions,reverse_code=migrations.RunPython.noop),
]
|
Add permissions for existing jobs via migration# https://docs.djangoproject.com/en/dev/howto/writing-migrations/#migrations-that-add-unique-fields
from django.db import migrations
from django.contrib.auth.models import Group, User
from ..models import Job
def add_permissions(apps, schema_editor):
JobPermission = apps.get_model("core", "JobPermission")
for job in Job.objects.all():
jp = JobPermission.objects.create(job=job, content_object=job.user,
permission=JobPermission.Permissions.ADMIN.value)
jp.save()
class Migration(migrations.Migration):
dependencies = [
('jobs', '0027_merge_20180417_1210'),
]
operations = [
# Existing jobs are assigned visibility=PRIVATE. Add admin permission for the owner
migrations.RunPython(add_permissions,reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add permissions for existing jobs via migration<commit_after># https://docs.djangoproject.com/en/dev/howto/writing-migrations/#migrations-that-add-unique-fields
from django.db import migrations
from django.contrib.auth.models import Group, User
from ..models import Job
def add_permissions(apps, schema_editor):
JobPermission = apps.get_model("core", "JobPermission")
for job in Job.objects.all():
jp = JobPermission.objects.create(job=job, content_object=job.user,
permission=JobPermission.Permissions.ADMIN.value)
jp.save()
class Migration(migrations.Migration):
dependencies = [
('jobs', '0027_merge_20180417_1210'),
]
operations = [
# Existing jobs are assigned visibility=PRIVATE. Add admin permission for the owner
migrations.RunPython(add_permissions,reverse_code=migrations.RunPython.noop),
]
|
|
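Two caveats with the data migration above: importing Job from ..models binds it to the current model code rather than the historical state, and class-level attributes such as the Permissions enum are not available on models obtained via apps.get_model (nor are GenericForeignKey accessors like content_object). A safer sketch of the same operation; the 'jobs' app label and the stored 'ADMIN' value are assumptions:

def add_permissions(apps, schema_editor):
    Job = apps.get_model('jobs', 'Job')
    JobPermission = apps.get_model('jobs', 'JobPermission')
    ContentType = apps.get_model('contenttypes', 'ContentType')
    for job in Job.objects.all():
        JobPermission.objects.create(  # create() saves; no extra save() needed
            job=job,
            content_type=ContentType.objects.get_for_model(job.user),
            object_id=job.user.pk,
            permission='ADMIN',  # assumed stored value of Permissions.ADMIN
        )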
a27c14bb4854a8a6be78da8020b64cb60f271b0a
|
tests/rules_tests/FromLeftRightToRulesTest.py
|
tests/rules_tests/FromLeftRightToRulesTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromLeftRightToRulesTest(TestCase):
pass
if __name__ == '__main__':
main()
|
Add file for rule's left right tests
|
Add file for rule's left right tests
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add file for rule's left right tests
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromLeftRightToRulesTest(TestCase):
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add file for rule's left right tests<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromLeftRightToRulesTest(TestCase):
pass
if __name__ == '__main__':
main()
|
Add file for rule's left right tests#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromLeftRightToRulesTest(TestCase):
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add file for rule's left right tests<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromLeftRightToRulesTest(TestCase):
pass
if __name__ == '__main__':
main()
|
|
625d1b51bd83687eb8928647e4fc9b582d8607c3
|
instance/migrations/0013_auto_20150805_1309.py
|
instance/migrations/0013_auto_20150805_1309.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('instance', '0012_auto_20150803_0606'),
]
operations = [
migrations.AlterField(
model_name='openedxinstance',
name='commit_id',
field=models.CharField(max_length=40, validators=[django.core.validators.RegexValidator(message='Full SHA1 hash required', regex='^[0-9a-f]{40}$')]),
),
migrations.AlterField(
model_name='openstackserver',
name='openstack_id',
field=models.CharField(max_length=250, blank=True, db_index=True),
),
]
|
Add missing migration to instance & server objects validation
|
Add missing migration to instance & server objects validation
|
Python
|
agpl-3.0
|
open-craft/opencraft,omarkhan/opencraft,omarkhan/opencraft,open-craft/opencraft,omarkhan/opencraft,open-craft/opencraft,brousch/opencraft,brousch/opencraft,open-craft/opencraft,omarkhan/opencraft,brousch/opencraft,open-craft/opencraft
|
Add missing migration to instance & server objects validation
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('instance', '0012_auto_20150803_0606'),
]
operations = [
migrations.AlterField(
model_name='openedxinstance',
name='commit_id',
field=models.CharField(max_length=40, validators=[django.core.validators.RegexValidator(message='Full SHA1 hash required', regex='^[0-9a-f]{40}$')]),
),
migrations.AlterField(
model_name='openstackserver',
name='openstack_id',
field=models.CharField(max_length=250, blank=True, db_index=True),
),
]
|
<commit_before><commit_msg>Add missing migration to instance & server objects validation<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('instance', '0012_auto_20150803_0606'),
]
operations = [
migrations.AlterField(
model_name='openedxinstance',
name='commit_id',
field=models.CharField(max_length=40, validators=[django.core.validators.RegexValidator(message='Full SHA1 hash required', regex='^[0-9a-f]{40}$')]),
),
migrations.AlterField(
model_name='openstackserver',
name='openstack_id',
field=models.CharField(max_length=250, blank=True, db_index=True),
),
]
|
Add missing migration to instance & server objects validation# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('instance', '0012_auto_20150803_0606'),
]
operations = [
migrations.AlterField(
model_name='openedxinstance',
name='commit_id',
field=models.CharField(max_length=40, validators=[django.core.validators.RegexValidator(message='Full SHA1 hash required', regex='^[0-9a-f]{40}$')]),
),
migrations.AlterField(
model_name='openstackserver',
name='openstack_id',
field=models.CharField(max_length=250, blank=True, db_index=True),
),
]
|
<commit_before><commit_msg>Add missing migration to instance & server objects validation<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('instance', '0012_auto_20150803_0606'),
]
operations = [
migrations.AlterField(
model_name='openedxinstance',
name='commit_id',
field=models.CharField(max_length=40, validators=[django.core.validators.RegexValidator(message='Full SHA1 hash required', regex='^[0-9a-f]{40}$')]),
),
migrations.AlterField(
model_name='openstackserver',
name='openstack_id',
field=models.CharField(max_length=250, blank=True, db_index=True),
),
]
|
|
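A quick illustration of what the commit_id validator accepts: the regex requires exactly 40 lowercase hex characters, so abbreviated or uppercase hashes are rejected:

import re

sha1 = re.compile(r'^[0-9a-f]{40}$')
assert sha1.match('a' * 40)          # full lowercase SHA1: accepted
assert not sha1.match('abc123')      # abbreviated hash: rejected
assert not sha1.match('A' * 40)      # uppercase: rejected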
c888400860baa3a26260fa81ecc8b415aeb6885d
|
source/excerptexport/tests/test_downloads.py
|
source/excerptexport/tests/test_downloads.py
|
import os
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.core.files import File
from django.contrib.auth.models import User
from excerptexport.models import OutputFile, Excerpt, ExtractionOrder
from excerptexport import settings
class DownloadsTestCase(TestCase):
def setUp(self):
settings.APPLICATION_SETTINGS['data_directory'] = '/tmp/osmaxx-dev-data'
if not os.path.isdir(settings.APPLICATION_SETTINGS['data_directory']):
os.makedirs(settings.APPLICATION_SETTINGS['data_directory'])
settings.APPLICATION_SETTINGS['download_file_name'] = '%(name)s'
def test_file_download(self):
user = User.objects.create_user('user', 'user@example.com', 'pw')
excerpt = Excerpt.objects.create(name='Neverland', is_active=True, is_public=True, owner=user)
extraction_order = ExtractionOrder.objects.create(excerpt=excerpt, orderer=user)
        output_file = OutputFile.objects.create(mime_type='text/plain', extraction_order=extraction_order)
file_path = settings.APPLICATION_SETTINGS['data_directory'] + '/' + output_file.public_identifier + '.txt'
with open(file_path, 'w') as file_reference:
new_file = File(file_reference)
new_file.write('Test text')
# file must be committed, so reopen to attach to model
output_file.file = file_path
output_file.save()
response = self.client.get(
reverse('excerptexport:download'),
{'file': output_file.public_identifier}
)
self.assertEqual(response['Content-Length'], str(os.path.getsize(file_path)))
self.assertEqual(
response['Content-Disposition'],
'attachment; filename=%s' % os.path.basename(output_file.file.name)
)
self.assertEqual(b''.join(response.streaming_content), b'Test text')
os.remove(file_path)
|
Add test for file download
|
Add test for file download
|
Python
|
mit
|
geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/drf-utm-zone-info
|
Add test for file download
|
import os
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.core.files import File
from django.contrib.auth.models import User
from excerptexport.models import OutputFile, Excerpt, ExtractionOrder
from excerptexport import settings
class DownloadsTestCase(TestCase):
def setUp(self):
settings.APPLICATION_SETTINGS['data_directory'] = '/tmp/osmaxx-dev-data'
if not os.path.isdir(settings.APPLICATION_SETTINGS['data_directory']):
os.makedirs(settings.APPLICATION_SETTINGS['data_directory'])
settings.APPLICATION_SETTINGS['download_file_name'] = '%(name)s'
def test_file_download(self):
user = User.objects.create_user('user', 'user@example.com', 'pw')
excerpt = Excerpt.objects.create(name='Neverland', is_active=True, is_public=True, owner=user)
extraction_order = ExtractionOrder.objects.create(excerpt=excerpt, orderer=user)
        output_file = OutputFile.objects.create(mime_type='text/plain', extraction_order=extraction_order)
file_path = settings.APPLICATION_SETTINGS['data_directory'] + '/' + output_file.public_identifier + '.txt'
with open(file_path, 'w') as file_reference:
new_file = File(file_reference)
new_file.write('Test text')
# file must be committed, so reopen to attach to model
output_file.file = file_path
output_file.save()
response = self.client.get(
reverse('excerptexport:download'),
{'file': output_file.public_identifier}
)
self.assertEqual(response['Content-Length'], str(os.path.getsize(file_path)))
self.assertEqual(
response['Content-Disposition'],
'attachment; filename=%s' % os.path.basename(output_file.file.name)
)
self.assertEqual(b''.join(response.streaming_content), b'Test text')
os.remove(file_path)
|
<commit_before><commit_msg>Add test for file download<commit_after>
|
import os
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.core.files import File
from django.contrib.auth.models import User
from excerptexport.models import OutputFile, Excerpt, ExtractionOrder
from excerptexport import settings
class DownloadsTestCase(TestCase):
def setUp(self):
settings.APPLICATION_SETTINGS['data_directory'] = '/tmp/osmaxx-dev-data'
if not os.path.isdir(settings.APPLICATION_SETTINGS['data_directory']):
os.makedirs(settings.APPLICATION_SETTINGS['data_directory'])
settings.APPLICATION_SETTINGS['download_file_name'] = '%(name)s'
def test_file_download(self):
user = User.objects.create_user('user', 'user@example.com', 'pw')
excerpt = Excerpt.objects.create(name='Neverland', is_active=True, is_public=True, owner=user)
extraction_order = ExtractionOrder.objects.create(excerpt=excerpt, orderer=user)
output_file = OutputFile.objects.create(mime_type='test/plain', extraction_order=extraction_order)
file_path = settings.APPLICATION_SETTINGS['data_directory'] + '/' + output_file.public_identifier + '.txt'
with open(file_path, 'w') as file_reference:
new_file = File(file_reference)
new_file.write('Test text')
# file must be committed, so reopen to attach to model
output_file.file = file_path
output_file.save()
response = self.client.get(
reverse('excerptexport:download'),
{'file': output_file.public_identifier}
)
self.assertEqual(response['Content-Length'], str(os.path.getsize(file_path)))
self.assertEqual(
response['Content-Disposition'],
'attachment; filename=%s' % os.path.basename(output_file.file.name)
)
self.assertEqual(b''.join(response.streaming_content), b'Test text')
os.remove(file_path)
|
Add test for file downloadimport os
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.core.files import File
from django.contrib.auth.models import User
from excerptexport.models import OutputFile, Excerpt, ExtractionOrder
from excerptexport import settings
class DownloadsTestCase(TestCase):
def setUp(self):
settings.APPLICATION_SETTINGS['data_directory'] = '/tmp/osmaxx-dev-data'
if not os.path.isdir(settings.APPLICATION_SETTINGS['data_directory']):
os.makedirs(settings.APPLICATION_SETTINGS['data_directory'])
settings.APPLICATION_SETTINGS['download_file_name'] = '%(name)s'
def test_file_download(self):
user = User.objects.create_user('user', 'user@example.com', 'pw')
excerpt = Excerpt.objects.create(name='Neverland', is_active=True, is_public=True, owner=user)
extraction_order = ExtractionOrder.objects.create(excerpt=excerpt, orderer=user)
        output_file = OutputFile.objects.create(mime_type='text/plain', extraction_order=extraction_order)
file_path = settings.APPLICATION_SETTINGS['data_directory'] + '/' + output_file.public_identifier + '.txt'
with open(file_path, 'w') as file_reference:
new_file = File(file_reference)
new_file.write('Test text')
# file must be committed, so reopen to attach to model
output_file.file = file_path
output_file.save()
response = self.client.get(
reverse('excerptexport:download'),
{'file': output_file.public_identifier}
)
self.assertEqual(response['Content-Length'], str(os.path.getsize(file_path)))
self.assertEqual(
response['Content-Disposition'],
'attachment; filename=%s' % os.path.basename(output_file.file.name)
)
self.assertEqual(b''.join(response.streaming_content), b'Test text')
os.remove(file_path)
|
<commit_before><commit_msg>Add test for file download<commit_after>import os
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.core.files import File
from django.contrib.auth.models import User
from excerptexport.models import OutputFile, Excerpt, ExtractionOrder
from excerptexport import settings
class DownloadsTestCase(TestCase):
def setUp(self):
settings.APPLICATION_SETTINGS['data_directory'] = '/tmp/osmaxx-dev-data'
if not os.path.isdir(settings.APPLICATION_SETTINGS['data_directory']):
os.makedirs(settings.APPLICATION_SETTINGS['data_directory'])
settings.APPLICATION_SETTINGS['download_file_name'] = '%(name)s'
def test_file_download(self):
user = User.objects.create_user('user', 'user@example.com', 'pw')
excerpt = Excerpt.objects.create(name='Neverland', is_active=True, is_public=True, owner=user)
extraction_order = ExtractionOrder.objects.create(excerpt=excerpt, orderer=user)
        output_file = OutputFile.objects.create(mime_type='text/plain', extraction_order=extraction_order)
file_path = settings.APPLICATION_SETTINGS['data_directory'] + '/' + output_file.public_identifier + '.txt'
with open(file_path, 'w') as file_reference:
new_file = File(file_reference)
new_file.write('Test text')
# file must be committed, so reopen to attach to model
output_file.file = file_path
output_file.save()
response = self.client.get(
reverse('excerptexport:download'),
{'file': output_file.public_identifier}
)
self.assertEqual(response['Content-Length'], str(os.path.getsize(file_path)))
self.assertEqual(
response['Content-Disposition'],
'attachment; filename=%s' % os.path.basename(output_file.file.name)
)
self.assertEqual(b''.join(response.streaming_content), b'Test text')
os.remove(file_path)
|
|
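The download test writes under a fixed /tmp path and mutates module-level settings without restoring them, which can leak state between tests. A sketch of a tidier setUp using tempfile and addCleanup; the setting keys follow the test above:

import shutil
import tempfile

class DownloadsTestCase(TestCase):
    def setUp(self):
        self._old_settings = dict(settings.APPLICATION_SETTINGS)
        settings.APPLICATION_SETTINGS['data_directory'] = tempfile.mkdtemp()
        settings.APPLICATION_SETTINGS['download_file_name'] = '%(name)s'
        self.addCleanup(self._restore)  # runs even if the test fails

    def _restore(self):
        shutil.rmtree(settings.APPLICATION_SETTINGS['data_directory'],
                      ignore_errors=True)
        settings.APPLICATION_SETTINGS.clear()
        settings.APPLICATION_SETTINGS.update(self._old_settings)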
f028e7638b02fad40561a2eca28d2bcfea34d064
|
numba/tests/test_fastmath.py
|
numba/tests/test_fastmath.py
|
from __future__ import print_function, absolute_import
import math
import numpy as np
from numba import unittest_support as unittest
from numba.tests.support import captured_stdout, override_config
from numba import njit, vectorize, guvectorize
class TestFastMath(unittest.TestCase):
def test_jit(self):
def foo(x):
return x + math.sin(x)
fastfoo = njit(fastmath=True)(foo)
slowfoo = njit(foo)
self.assertEqual(fastfoo(0.5), slowfoo(0.5))
fastllvm = fastfoo.inspect_llvm(fastfoo.signatures[0])
slowllvm = slowfoo.inspect_llvm(slowfoo.signatures[0])
# Ensure fast attribute in fast version only
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_vectorize(self):
def foo(x):
return x + math.sin(x)
fastfoo = vectorize(fastmath=True)(foo)
slowfoo = vectorize(foo)
x = np.random.random(8).astype(np.float32)
# capture the optimized llvm to check for fast flag
with override_config('DUMP_OPTIMIZED', True):
with captured_stdout() as slow_cap:
expect = slowfoo(x)
slowllvm = slow_cap.getvalue()
with captured_stdout() as fast_cap:
got = fastfoo(x)
fastllvm = fast_cap.getvalue()
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_guvectorize(self):
def foo(x, out):
out[0] = x + math.sin(x)
x = np.random.random(8).astype(np.float32)
with override_config('DUMP_OPTIMIZED', True):
types = ['(float32, float32[:])']
sig = '()->()'
with captured_stdout() as fast_cap:
fastfoo = guvectorize(types, sig, fastmath=True)(foo)
fastllvm = fast_cap.getvalue()
with captured_stdout() as slow_cap:
slowfoo = guvectorize(types, sig)(foo)
slowllvm = slow_cap.getvalue()
expect = slowfoo(x)
got = fastfoo(x)
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
if __name__ == '__main__':
unittest.main()
|
Add test cases for fastmath flag
|
Add test cases for fastmath flag
|
Python
|
bsd-2-clause
|
cpcloud/numba,jriehl/numba,seibert/numba,stuartarchibald/numba,IntelLabs/numba,stuartarchibald/numba,numba/numba,gmarkall/numba,sklam/numba,stonebig/numba,cpcloud/numba,cpcloud/numba,sklam/numba,seibert/numba,seibert/numba,IntelLabs/numba,IntelLabs/numba,IntelLabs/numba,stonebig/numba,stuartarchibald/numba,jriehl/numba,cpcloud/numba,IntelLabs/numba,jriehl/numba,seibert/numba,stuartarchibald/numba,gmarkall/numba,stuartarchibald/numba,jriehl/numba,gmarkall/numba,numba/numba,jriehl/numba,seibert/numba,sklam/numba,stonebig/numba,sklam/numba,stonebig/numba,numba/numba,cpcloud/numba,stonebig/numba,sklam/numba,gmarkall/numba,numba/numba,gmarkall/numba,numba/numba
|
Add test cases for fastmath flag
|
from __future__ import print_function, absolute_import
import math
import numpy as np
from numba import unittest_support as unittest
from numba.tests.support import captured_stdout, override_config
from numba import njit, vectorize, guvectorize
class TestFastMath(unittest.TestCase):
def test_jit(self):
def foo(x):
return x + math.sin(x)
fastfoo = njit(fastmath=True)(foo)
slowfoo = njit(foo)
self.assertEqual(fastfoo(0.5), slowfoo(0.5))
fastllvm = fastfoo.inspect_llvm(fastfoo.signatures[0])
slowllvm = slowfoo.inspect_llvm(slowfoo.signatures[0])
# Ensure fast attribute in fast version only
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_vectorize(self):
def foo(x):
return x + math.sin(x)
fastfoo = vectorize(fastmath=True)(foo)
slowfoo = vectorize(foo)
x = np.random.random(8).astype(np.float32)
# capture the optimized llvm to check for fast flag
with override_config('DUMP_OPTIMIZED', True):
with captured_stdout() as slow_cap:
expect = slowfoo(x)
slowllvm = slow_cap.getvalue()
with captured_stdout() as fast_cap:
got = fastfoo(x)
fastllvm = fast_cap.getvalue()
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_guvectorize(self):
def foo(x, out):
out[0] = x + math.sin(x)
x = np.random.random(8).astype(np.float32)
with override_config('DUMP_OPTIMIZED', True):
types = ['(float32, float32[:])']
sig = '()->()'
with captured_stdout() as fast_cap:
fastfoo = guvectorize(types, sig, fastmath=True)(foo)
fastllvm = fast_cap.getvalue()
with captured_stdout() as slow_cap:
slowfoo = guvectorize(types, sig)(foo)
slowllvm = slow_cap.getvalue()
expect = slowfoo(x)
got = fastfoo(x)
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test cases for fastmath flag<commit_after>
|
from __future__ import print_function, absolute_import
import math
import numpy as np
from numba import unittest_support as unittest
from numba.tests.support import captured_stdout, override_config
from numba import njit, vectorize, guvectorize
class TestFastMath(unittest.TestCase):
def test_jit(self):
def foo(x):
return x + math.sin(x)
fastfoo = njit(fastmath=True)(foo)
slowfoo = njit(foo)
self.assertEqual(fastfoo(0.5), slowfoo(0.5))
fastllvm = fastfoo.inspect_llvm(fastfoo.signatures[0])
slowllvm = slowfoo.inspect_llvm(slowfoo.signatures[0])
# Ensure fast attribute in fast version only
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_vectorize(self):
def foo(x):
return x + math.sin(x)
fastfoo = vectorize(fastmath=True)(foo)
slowfoo = vectorize(foo)
x = np.random.random(8).astype(np.float32)
# capture the optimized llvm to check for fast flag
with override_config('DUMP_OPTIMIZED', True):
with captured_stdout() as slow_cap:
expect = slowfoo(x)
slowllvm = slow_cap.getvalue()
with captured_stdout() as fast_cap:
got = fastfoo(x)
fastllvm = fast_cap.getvalue()
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_guvectorize(self):
def foo(x, out):
out[0] = x + math.sin(x)
x = np.random.random(8).astype(np.float32)
with override_config('DUMP_OPTIMIZED', True):
types = ['(float32, float32[:])']
sig = '()->()'
with captured_stdout() as fast_cap:
fastfoo = guvectorize(types, sig, fastmath=True)(foo)
fastllvm = fast_cap.getvalue()
with captured_stdout() as slow_cap:
slowfoo = guvectorize(types, sig)(foo)
slowllvm = slow_cap.getvalue()
expect = slowfoo(x)
got = fastfoo(x)
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
if __name__ == '__main__':
unittest.main()
|
Add test cases for fastmath flagfrom __future__ import print_function, absolute_import
import math
import numpy as np
from numba import unittest_support as unittest
from numba.tests.support import captured_stdout, override_config
from numba import njit, vectorize, guvectorize
class TestFastMath(unittest.TestCase):
def test_jit(self):
def foo(x):
return x + math.sin(x)
fastfoo = njit(fastmath=True)(foo)
slowfoo = njit(foo)
self.assertEqual(fastfoo(0.5), slowfoo(0.5))
fastllvm = fastfoo.inspect_llvm(fastfoo.signatures[0])
slowllvm = slowfoo.inspect_llvm(slowfoo.signatures[0])
# Ensure fast attribute in fast version only
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_vectorize(self):
def foo(x):
return x + math.sin(x)
fastfoo = vectorize(fastmath=True)(foo)
slowfoo = vectorize(foo)
x = np.random.random(8).astype(np.float32)
# capture the optimized llvm to check for fast flag
with override_config('DUMP_OPTIMIZED', True):
with captured_stdout() as slow_cap:
expect = slowfoo(x)
slowllvm = slow_cap.getvalue()
with captured_stdout() as fast_cap:
got = fastfoo(x)
fastllvm = fast_cap.getvalue()
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_guvectorize(self):
def foo(x, out):
out[0] = x + math.sin(x)
x = np.random.random(8).astype(np.float32)
with override_config('DUMP_OPTIMIZED', True):
types = ['(float32, float32[:])']
sig = '()->()'
with captured_stdout() as fast_cap:
fastfoo = guvectorize(types, sig, fastmath=True)(foo)
fastllvm = fast_cap.getvalue()
with captured_stdout() as slow_cap:
slowfoo = guvectorize(types, sig)(foo)
slowllvm = slow_cap.getvalue()
expect = slowfoo(x)
got = fastfoo(x)
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test cases for fastmath flag<commit_after>from __future__ import print_function, absolute_import
import math
import numpy as np
from numba import unittest_support as unittest
from numba.tests.support import captured_stdout, override_config
from numba import njit, vectorize, guvectorize
class TestFastMath(unittest.TestCase):
def test_jit(self):
def foo(x):
return x + math.sin(x)
fastfoo = njit(fastmath=True)(foo)
slowfoo = njit(foo)
self.assertEqual(fastfoo(0.5), slowfoo(0.5))
fastllvm = fastfoo.inspect_llvm(fastfoo.signatures[0])
slowllvm = slowfoo.inspect_llvm(slowfoo.signatures[0])
# Ensure fast attribute in fast version only
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_vectorize(self):
def foo(x):
return x + math.sin(x)
fastfoo = vectorize(fastmath=True)(foo)
slowfoo = vectorize(foo)
x = np.random.random(8).astype(np.float32)
# capture the optimized llvm to check for fast flag
with override_config('DUMP_OPTIMIZED', True):
with captured_stdout() as slow_cap:
expect = slowfoo(x)
slowllvm = slow_cap.getvalue()
with captured_stdout() as fast_cap:
got = fastfoo(x)
fastllvm = fast_cap.getvalue()
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
def test_guvectorize(self):
def foo(x, out):
out[0] = x + math.sin(x)
x = np.random.random(8).astype(np.float32)
with override_config('DUMP_OPTIMIZED', True):
types = ['(float32, float32[:])']
sig = '()->()'
with captured_stdout() as fast_cap:
fastfoo = guvectorize(types, sig, fastmath=True)(foo)
fastllvm = fast_cap.getvalue()
with captured_stdout() as slow_cap:
slowfoo = guvectorize(types, sig)(foo)
slowllvm = slow_cap.getvalue()
expect = slowfoo(x)
got = fastfoo(x)
np.testing.assert_almost_equal(expect, got)
self.assertIn('fadd fast', fastllvm)
self.assertIn('call fast', fastllvm)
self.assertNotIn('fadd fast', slowllvm)
self.assertNotIn('call fast', slowllvm)
if __name__ == '__main__':
unittest.main()
|
|
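The tests above assert on the LLVM IR. Fastmath can also change numeric results by allowing reassociation of floating-point adds; a small demonstration (the two sums may still agree bit-for-bit on some targets, so this is illustrative rather than a reliable assertion):

import numpy as np
from numba import njit

@njit(fastmath=True)
def fast_sum(arr):
    acc = 0.0
    for x in arr:
        acc += x   # reassociation permitted, enabling vectorized partial sums
    return acc

@njit
def strict_sum(arr):
    acc = 0.0
    for x in arr:
        acc += x   # strict left-to-right IEEE summation
    return acc

x = np.random.random(10**6)
print(fast_sum(x), strict_sum(x))  # typically differ in the last few ulps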
edc482e33236ad775d6cb7c941ef02575db36f4c
|
factorial_variations.py
|
factorial_variations.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A program with several variations of implementing the factorial function.
http://www.pythontutor.com/visualize.html#mode=edit
"""
def factorial_iter(n):
if n == 0:
return 1
result = 1
while n >= 1:
result = result * n
n = n - 1
return result
def factorial_iter_opt(n):
if n == 0 or n == 1:
return 1
result = 1
while n > 1:
result = result * n
n = n - 1
return result
def factorial_rec(n):
if n == 0 or n == 1:
return 1
return n * factorial_rec(n - 1)
if __name__ == "__main__":
print(factorial_iter(3))
print(factorial_iter_opt(3))
print(factorial_rec(3))
|
Create a new lesson file
|
Create a new lesson file
|
Python
|
mit
|
holkasepsem/python-lessons
|
Create a new lesson file
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A program with several variations of implementing the factorial function.
http://www.pythontutor.com/visualize.html#mode=edit
"""
def factorial_iter(n):
if n == 0:
return 1
result = 1
while n >= 1:
result = result * n
n = n - 1
return result
def factorial_iter_opt(n):
if n == 0 or n == 1:
return 1
result = 1
while n > 1:
result = result * n
n = n - 1
return result
def factorial_rec(n):
if n == 0 or n == 1:
return 1
return n * factorial_rec(n - 1)
if __name__ == "__main__":
print(factorial_iter(3))
print(factorial_iter_opt(3))
print(factorial_rec(3))
|
<commit_before><commit_msg>Create a new lesson file<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A program with several variations of implementing the factorial function.
http://www.pythontutor.com/visualize.html#mode=edit
"""
def factorial_iter(n):
if n == 0:
return 1
result = 1
while n >= 1:
result = result * n
n = n - 1
return result
def factorial_iter_opt(n):
if n == 0 or n == 1:
return 1
result = 1
while n > 1:
result = result * n
n = n - 1
return result
def factorial_rec(n):
if n == 0 or n == 1:
return 1
return n * factorial_rec(n - 1)
if __name__ == "__main__":
print(factorial_iter(3))
print(factorial_iter_opt(3))
print(factorial_rec(3))
|
Create a new lesson file#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A program with several variations of implementing the factorial function.
http://www.pythontutor.com/visualize.html#mode=edit
"""
def factorial_iter(n):
if n == 0:
return 1
result = 1
while n >= 1:
result = result * n
n = n - 1
return result
def factorial_iter_opt(n):
if n == 0 or n == 1:
return 1
result = 1
while n > 1:
result = result * n
n = n - 1
return result
def factorial_rec(n):
if n == 0 or n == 1:
return 1
return n * factorial_rec(n - 1)
if __name__ == "__main__":
print(factorial_iter(3))
print(factorial_iter_opt(3))
print(factorial_rec(3))
|
<commit_before><commit_msg>Create a new lesson file<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A program with several variations of implementing the factorial function.
http://www.pythontutor.com/visualize.html#mode=edit
"""
def factorial_iter(n):
if n == 0:
return 1
result = 1
while n >= 1:
result = result * n
n = n - 1
return result
def factorial_iter_opt(n):
if n == 0 or n == 1:
return 1
result = 1
while n > 1:
result = result * n
n = n - 1
return result
def factorial_rec(n):
if n == 0 or n == 1:
return 1
return n * factorial_rec(n - 1)
if __name__ == "__main__":
print(factorial_iter(3))
print(factorial_iter_opt(3))
print(factorial_rec(3))
|
|
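A quick consistency check for the three variants against the standard library (not part of the lesson file):

import math

for n in range(10):
    assert factorial_iter(n) == factorial_iter_opt(n) == factorial_rec(n) == math.factorial(n)
print("all variants agree with math.factorial for n < 10")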
e4043f88c77d5aff1962c73f126176f1328f0c27
|
python-files/generate-allpasses.py
|
python-files/generate-allpasses.py
|
#!/usr/bin/env python3
from itertools import product
import pickle
import sys
import time
from schedulingbazaar import (load_tles,
load_gs,
compute_all_passes,
load_all_passes)
sats = load_tles('amateur.txt')
stations = load_gs('groundstations.txt')
start_time = '2017/6/8 00:00:00'
# duration = 8760  # a year's worth of hours
duration = 3
line = '-- %-30s -------------'
print(line % 'Computing passes')
tree = compute_all_passes(stations[:3],
sats[:3],
start_time,
duration=duration,
dbfile='somepasses.db')
# give the filesystem some time to finish closing the database file
time.sleep(1)
print(line % 'Save/load as pickle')
pickle.dump(tree, open('somepasses.pkl', 'wb'))
time.sleep(1)
treepkl = pickle.load(open('somepasses.pkl', 'rb'))
print(line % 'Load from db')
treeload = load_all_passes('somepasses.db')
print(line % 'All diffs should be empty')
trees = (tree, treepkl, treeload)
nfail = 0
for a,b in product(trees, trees):
diff = a.difference(b)
print(diff)
if len(diff) != 0:
nfail += 1
if nfail == 0:
print(line % 'All good!')
else:
print(line % 'FAILURES DETECTED')
sys.exit(nfail)
|
Add a utility to generate the master passes db
|
Add a utility to generate the master passes db
Assumes that amateur.txt and groundstations.txt are in the same folder.
|
Python
|
agpl-3.0
|
valpo-sats/scheduling-bazaar,valpo-sats/scheduling-bazaar
|
Add a utility to generate the master passes db
Assumes that amateur.txt and groundstations.txt are in the same folder.
|
#!/usr/bin/env python3
from itertools import product
import pickle
import sys
import time
from schedulingbazaar import (load_tles,
load_gs,
compute_all_passes,
load_all_passes)
sats = load_tles('amateur.txt')
stations = load_gs('groundstations.txt')
start_time = '2017/6/8 00:00:00'
# duration = 8760 #a year worth of hours
duration = 3
line = '-- %-30s -------------'
print(line % 'Computing passes')
tree = compute_all_passes(stations[:3],
sats[:3],
start_time,
duration=duration,
dbfile='somepasses.db')
# give the filesystem some time to finish closing the database file
time.sleep(1)
print(line % 'Save/load as pickle')
pickle.dump(tree, open('somepasses.pkl', 'wb'))
time.sleep(1)
treepkl = pickle.load(open('somepasses.pkl', 'rb'))
print(line % 'Load from db')
treeload = load_all_passes('somepasses.db')
print(line % 'All diffs should be empty')
trees = (tree, treepkl, treeload)
nfail = 0
for a,b in product(trees, trees):
diff = a.difference(b)
print(diff)
if len(diff) != 0:
nfail += 1
if nfail == 0:
print(line % 'All good!')
else:
print(line % 'FAILURES DETECTED')
sys.exit(nfail)
|
<commit_before><commit_msg>Add a utility to generate the master passes db
Assumes that amateur.txt and groundstations.txt are in the same folder.<commit_after>
|
#!/usr/bin/env python3
from itertools import product
import pickle
import sys
import time
from schedulingbazaar import (load_tles,
load_gs,
compute_all_passes,
load_all_passes)
sats = load_tles('amateur.txt')
stations = load_gs('groundstations.txt')
start_time = '2017/6/8 00:00:00'
# duration = 8760 #a year worth of hours
duration = 3
line = '-- %-30s -------------'
print(line % 'Computing passes')
tree = compute_all_passes(stations[:3],
sats[:3],
start_time,
duration=duration,
dbfile='somepasses.db')
# give the filesystem some time to finish closing the database file
time.sleep(1)
print(line % 'Save/load as pickle')
pickle.dump(tree, open('somepasses.pkl', 'wb'))
time.sleep(1)
treepkl = pickle.load(open('somepasses.pkl', 'rb'))
print(line % 'Load from db')
treeload = load_all_passes('somepasses.db')
print(line % 'All diffs should be empty')
trees = (tree, treepkl, treeload)
nfail = 0
for a,b in product(trees, trees):
diff = a.difference(b)
print(diff)
if len(diff) != 0:
nfail += 1
if nfail == 0:
print(line % 'All good!')
else:
print(line % 'FAILURES DETECTED')
sys.exit(nfail)
|
Add a utility to generate the master passes db
Assumes that amateur.txt and groundstations.txt are in the same folder.#!/usr/bin/env python3
from itertools import product
import pickle
import sys
import time
from schedulingbazaar import (load_tles,
load_gs,
compute_all_passes,
load_all_passes)
sats = load_tles('amateur.txt')
stations = load_gs('groundstations.txt')
start_time = '2017/6/8 00:00:00'
# duration = 8760 #a year worth of hours
duration = 3
line = '-- %-30s -------------'
print(line % 'Computing passes')
tree = compute_all_passes(stations[:3],
sats[:3],
start_time,
duration=duration,
dbfile='somepasses.db')
# give the filesystem some time to finish closing the database file
time.sleep(1)
print(line % 'Save/load as pickle')
pickle.dump(tree, open('somepasses.pkl', 'wb'))
time.sleep(1)
treepkl = pickle.load(open('somepasses.pkl', 'rb'))
print(line % 'Load from db')
treeload = load_all_passes('somepasses.db')
print(line % 'All diffs should be empty')
trees = (tree, treepkl, treeload)
nfail = 0
for a,b in product(trees, trees):
diff = a.difference(b)
print(diff)
if len(diff) != 0:
nfail += 1
if nfail == 0:
print(line % 'All good!')
else:
print(line % 'FAILURES DETECTED')
sys.exit(nfail)
|
<commit_before><commit_msg>Add a utility to generate the master passes db
Assumes that amateur.txt and groundstations.txt are in the same folder.<commit_after>#!/usr/bin/env python3
from itertools import product
import pickle
import sys
import time
from schedulingbazaar import (load_tles,
load_gs,
compute_all_passes,
load_all_passes)
sats = load_tles('amateur.txt')
stations = load_gs('groundstations.txt')
start_time = '2017/6/8 00:00:00'
# duration = 8760 #a year worth of hours
duration = 3
line = '-- %-30s -------------'
print(line % 'Computing passes')
tree = compute_all_passes(stations[:3],
sats[:3],
start_time,
duration=duration,
dbfile='somepasses.db')
# give the filesystem some time to finish closing the database file
time.sleep(1)
print(line % 'Save/load as pickle')
pickle.dump(tree, open('somepasses.pkl', 'wb'))
time.sleep(1)
treepkl = pickle.load(open('somepasses.pkl', 'rb'))
print(line % 'Load from db')
treeload = load_all_passes('somepasses.db')
print(line % 'All diffs should be empty')
trees = (tree, treepkl, treeload)
nfail = 0
for a,b in product(trees, trees):
diff = a.difference(b)
print(diff)
if len(diff) != 0:
nfail += 1
if nfail == 0:
print(line % 'All good!')
else:
print(line % 'FAILURES DETECTED')
sys.exit(nfail)
|
|
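The script above relies on a compact testing idiom: materialize the same structure several ways, then require every pairwise set difference to be empty. A stripped-down sketch of that pattern, with a plain set of made-up tuples standing in for the interval tree:

import pickle
from itertools import product

original = {('GS-1', 'SAT-1', 0.0), ('GS-2', 'SAT-2', 1.5)}
roundtrip = pickle.loads(pickle.dumps(original))

# Every ordered pair must have an empty difference for the copies to agree.
nfail = sum(1 for a, b in product((original, roundtrip), repeat=2) if a - b)
assert nfail == 0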
624f771e79d36cd013b2372feafc6d4dff37da39
|
edgedb/lang/common/datastructures/tests/test_all.py
|
edgedb/lang/common/datastructures/tests/test_all.py
|
##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import pickle
from semantix.utils.datastructures import Void
from semantix.utils.debug import assert_raises
class TestUtilsDSAll:
def test_utils_ds_markers_pickle(self):
assert pickle.loads(pickle.dumps(Void)) is Void
assert not Void
with assert_raises(TypeError, error_re='instantiated'):
Void()
|
Add pickle unittests for Markers
|
utils.datastructures.tests: Add pickle unittests for Markers
|
Python
|
apache-2.0
|
edgedb/edgedb,edgedb/edgedb,edgedb/edgedb
|
utils.datastructures.tests: Add pickle unittests for Markers
|
##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import pickle
from semantix.utils.datastructures import Void
from semantix.utils.debug import assert_raises
class TestUtilsDSAll:
def test_utils_ds_markers_pickle(self):
assert pickle.loads(pickle.dumps(Void)) is Void
assert not Void
with assert_raises(TypeError, error_re='instantiated'):
Void()
|
<commit_before><commit_msg>utils.datastructures.tests: Add pickle unittests for Markers<commit_after>
|
##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import pickle
from semantix.utils.datastructures import Void
from semantix.utils.debug import assert_raises
class TestUtilsDSAll:
def test_utils_ds_markers_pickle(self):
assert pickle.loads(pickle.dumps(Void)) is Void
assert not Void
with assert_raises(TypeError, error_re='instantiated'):
Void()
|
utils.datastructures.tests: Add pickle unittests for Markers##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import pickle
from semantix.utils.datastructures import Void
from semantix.utils.debug import assert_raises
class TestUtilsDSAll:
def test_utils_ds_markers_pickle(self):
assert pickle.loads(pickle.dumps(Void)) is Void
assert not Void
with assert_raises(TypeError, error_re='instantiated'):
Void()
|
<commit_before><commit_msg>utils.datastructures.tests: Add pickle unittests for Markers<commit_after>##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import pickle
from semantix.utils.datastructures import Void
from semantix.utils.debug import assert_raises
class TestUtilsDSAll:
def test_utils_ds_markers_pickle(self):
assert pickle.loads(pickle.dumps(Void)) is Void
assert not Void
with assert_raises(TypeError, error_re='instantiated'):
Void()
|
|
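For context on why pickle.loads(pickle.dumps(Void)) is Void can hold at all: a marker can route pickling through a module-level accessor so that unpickling resolves back to the single instance. A hedged sketch of that mechanism (semantix's actual implementation may differ):

import pickle

class _VoidType(object):
    """Falsy singleton marker whose pickled form resolves back to Void."""
    def __bool__(self):              # Python 3 truthiness
        return False
    __nonzero__ = __bool__           # Python 2 spelling
    def __call__(self):
        raise TypeError('markers cannot be instantiated')
    def __reduce__(self):
        # Pickle as a call to the module-level accessor, preserving identity.
        return (_get_void, ())

Void = _VoidType()

def _get_void():
    return Void

assert pickle.loads(pickle.dumps(Void)) is Void
assert not Void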
fa4a12125d0ce3651452dd8b7049167b3c413eda
|
cnxarchive/sql/migrations/20160511151028_add_update_default_modules_stateid_trigger.py
|
cnxarchive/sql/migrations/20160511151028_add_update_default_modules_stateid_trigger.py
|
# -*- coding: utf-8 -*-
"""Add a trigger on modules to set all new inserted books to have state
"post-publication" in case there is a ruleset for collation.
"""
def up(cursor):
cursor.execute("""\
CREATE OR REPLACE FUNCTION update_default_modules_stateid ()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
IF NEW.portal_type = 'Collection' THEN
NEW.stateid = 5;
END IF;
RETURN NEW;
END
$$;
CREATE TRIGGER update_default_modules_stateid
BEFORE INSERT ON modules FOR EACH ROW
EXECUTE PROCEDURE update_default_modules_stateid();""")
def down(cursor):
cursor.execute(
'DROP TRIGGER IF EXISTS update_default_modules_stateid ON modules')
cursor.execute('DROP FUNCTION IF EXISTS update_default_modules_stateid()')
|
Add migration to add sql function for updating default stateid
|
Add migration to add sql function for updating default stateid
|
Python
|
agpl-3.0
|
Connexions/cnx-archive,Connexions/cnx-archive
|
Add migration to add sql function for updating default stateid
|
# -*- coding: utf-8 -*-
"""Add a trigger on modules to set all new inserted books to have state
"post-publication" in case there is a ruleset for collation.
"""
def up(cursor):
cursor.execute("""\
CREATE OR REPLACE FUNCTION update_default_modules_stateid ()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
IF NEW.portal_type = 'Collection' THEN
NEW.stateid = 5;
END IF;
RETURN NEW;
END
$$;
CREATE TRIGGER update_default_modules_stateid
BEFORE INSERT ON modules FOR EACH ROW
EXECUTE PROCEDURE update_default_modules_stateid();""")
def down(cursor):
cursor.execute(
'DROP TRIGGER IF EXISTS update_default_modules_stateid ON modules')
cursor.execute('DROP FUNCTION IF EXISTS update_default_modules_stateid()')
|
<commit_before><commit_msg>Add migration to add sql function for updating default stateid<commit_after>
|
# -*- coding: utf-8 -*-
"""Add a trigger on modules to set all new inserted books to have state
"post-publication" in case there is a ruleset for collation.
"""
def up(cursor):
cursor.execute("""\
CREATE OR REPLACE FUNCTION update_default_modules_stateid ()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
IF NEW.portal_type = 'Collection' THEN
NEW.stateid = 5;
END IF;
RETURN NEW;
END
$$;
CREATE TRIGGER update_default_modules_stateid
BEFORE INSERT ON modules FOR EACH ROW
EXECUTE PROCEDURE update_default_modules_stateid();""")
def down(cursor):
cursor.execute(
'DROP TRIGGER IF EXISTS update_default_modules_stateid ON modules')
cursor.execute('DROP FUNCTION IF EXISTS update_default_modules_stateid()')
|
Add migration to add sql function for updating default stateid# -*- coding: utf-8 -*-
"""Add a trigger on modules to set all new inserted books to have state
"post-publication" in case there is a ruleset for collation.
"""
def up(cursor):
cursor.execute("""\
CREATE OR REPLACE FUNCTION update_default_modules_stateid ()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
IF NEW.portal_type = 'Collection' THEN
NEW.stateid = 5;
END IF;
RETURN NEW;
END
$$;
CREATE TRIGGER update_default_modules_stateid
BEFORE INSERT ON modules FOR EACH ROW
EXECUTE PROCEDURE update_default_modules_stateid();""")
def down(cursor):
cursor.execute(
'DROP TRIGGER IF EXISTS update_default_modules_stateid ON modules')
cursor.execute('DROP FUNCTION IF EXISTS update_default_modules_stateid()')
|
<commit_before><commit_msg>Add migration to add sql function for updating default stateid<commit_after># -*- coding: utf-8 -*-
"""Add a trigger on modules to set all new inserted books to have state
"post-publication" in case there is a ruleset for collation.
"""
def up(cursor):
cursor.execute("""\
CREATE OR REPLACE FUNCTION update_default_modules_stateid ()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
IF NEW.portal_type = 'Collection' THEN
NEW.stateid = 5;
END IF;
RETURN NEW;
END
$$;
CREATE TRIGGER update_default_modules_stateid
BEFORE INSERT ON modules FOR EACH ROW
EXECUTE PROCEDURE update_default_modules_stateid();""")
def down(cursor):
cursor.execute(
'DROP TRIGGER IF EXISTS update_default_modules_stateid ON modules')
cursor.execute('DROP FUNCTION IF EXISTS update_default_modules_stateid()')
|
|
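These up()/down() hooks can be smoke-tested without a database by handing them a stand-in cursor that merely records SQL; this sketch assumes the two functions above are in scope:

class RecordingCursor(object):
    def __init__(self):
        self.statements = []

    def execute(self, sql):
        self.statements.append(sql)

cursor = RecordingCursor()
up(cursor)
down(cursor)
assert 'CREATE TRIGGER update_default_modules_stateid' in cursor.statements[0]
assert cursor.statements[1].startswith('DROP TRIGGER')
assert cursor.statements[2].startswith('DROP FUNCTION')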
f3187499a19b2656e44f97e274ea63c996a742a5
|
fpsd/test/test_utils.py
|
fpsd/test/test_utils.py
|
#!/usr/bin/env python3.5
import unittest
import datetime
from utils import get_lookback
class LookbackTimeTest(unittest.TestCase):
def test_one_week(self):
self.assertEqual(get_lookback('1w'), datetime.timedelta(7))
def test_four_weeks(self):
self.assertEqual(get_lookback('4w'), datetime.timedelta(28))
def test_one_month(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('1m')
def test_no_units(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('666')
|
Add tests for new utils
|
Add tests for new utils
|
Python
|
agpl-3.0
|
freedomofpress/fingerprint-securedrop,freedomofpress/fingerprint-securedrop,freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop,freedomofpress/FingerprintSecureDrop
|
Add tests for new utils
|
#!/usr/bin/env python3.5
import unittest
import datetime
from utils import get_lookback
class LookbackTimeTest(unittest.TestCase):
def test_one_week(self):
self.assertEqual(get_lookback('1w'), datetime.timedelta(7))
def test_four_weeks(self):
self.assertEqual(get_lookback('4w'), datetime.timedelta(28))
def test_one_month(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('1m')
def test_no_units(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('666')
|
<commit_before><commit_msg>Add tests for new utils<commit_after>
|
#!/usr/bin/env python3.5
import unittest
import datetime
from utils import get_lookback
class LookbackTimeTest(unittest.TestCase):
def test_one_week(self):
self.assertEqual(get_lookback('1w'), datetime.timedelta(7))
def test_four_weeks(self):
self.assertEqual(get_lookback('4w'), datetime.timedelta(28))
def test_one_month(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('1m')
def test_no_units(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('666')
|
Add tests for new utils#!/usr/bin/env python3.5
import unittest
import datetime
from utils import get_lookback
class LookbackTimeTest(unittest.TestCase):
def test_one_week(self):
self.assertEqual(get_lookback('1w'), datetime.timedelta(7))
def test_four_weeks(self):
self.assertEqual(get_lookback('4w'), datetime.timedelta(28))
def test_one_month(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('1m')
def test_no_units(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('666')
|
<commit_before><commit_msg>Add tests for new utils<commit_after>#!/usr/bin/env python3.5
import unittest
import datetime
from utils import get_lookback
class LookbackTimeTest(unittest.TestCase):
def test_one_week(self):
self.assertEqual(get_lookback('1w'), datetime.timedelta(7))
def test_four_weeks(self):
self.assertEqual(get_lookback('4w'), datetime.timedelta(28))
def test_one_month(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('1m')
def test_no_units(self):
with self.assertRaises(TypeError):
lookback_time = get_lookback('666')
|
|
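The tests pin down a small contract: only '<N>w' strings are accepted, and anything else raises TypeError. One plausible implementation that satisfies them (an assumption; the repo's utils.get_lookback may differ):

import datetime
import re

def get_lookback(lookback_str):
    # Accept week counts only, e.g. '1w' or '4w'; reject months and bare numbers.
    match = re.fullmatch(r'(\d+)w', lookback_str)
    if match is None:
        raise TypeError('lookback must look like "4w" (weeks only)')
    return datetime.timedelta(weeks=int(match.group(1)))

assert get_lookback('1w') == datetime.timedelta(7)
assert get_lookback('4w') == datetime.timedelta(28)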
d9fbe6bb6f2916cd29daefe476696c8279b49123
|
workshops/migrations/0005_auto_20150404_0953.py
|
workshops/migrations/0005_auto_20150404_0953.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('workshops', '0004_merge'),
]
operations = [
migrations.AlterField(
model_name='person',
name='groups',
field=models.ManyToManyField(help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', blank=True, to='auth.Group', related_name='user_set', related_query_name='user', verbose_name='groups'),
),
migrations.AlterField(
model_name='person',
name='last_login',
field=models.DateTimeField(blank=True, null=True, verbose_name='last login'),
),
]
|
Add important migration to Django 1.8
|
Add important migration to Django 1.8
This migration makes `Person.last_login` NULL-able.
|
Python
|
mit
|
wking/swc-amy,wking/swc-amy,pbanaszkiewicz/amy,swcarpentry/amy,swcarpentry/amy,vahtras/amy,shapiromatron/amy,shapiromatron/amy,shapiromatron/amy,vahtras/amy,wking/swc-amy,swcarpentry/amy,wking/swc-amy,pbanaszkiewicz/amy,vahtras/amy,pbanaszkiewicz/amy
|
Add important migration to Django 1.8
This migration makes `Person.last_login` NULL-able.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('workshops', '0004_merge'),
]
operations = [
migrations.AlterField(
model_name='person',
name='groups',
field=models.ManyToManyField(help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', blank=True, to='auth.Group', related_name='user_set', related_query_name='user', verbose_name='groups'),
),
migrations.AlterField(
model_name='person',
name='last_login',
field=models.DateTimeField(blank=True, null=True, verbose_name='last login'),
),
]
|
<commit_before><commit_msg>Add important migration to Django 1.8
This migration makes `Person.last_login` NULL-able.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('workshops', '0004_merge'),
]
operations = [
migrations.AlterField(
model_name='person',
name='groups',
field=models.ManyToManyField(help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', blank=True, to='auth.Group', related_name='user_set', related_query_name='user', verbose_name='groups'),
),
migrations.AlterField(
model_name='person',
name='last_login',
field=models.DateTimeField(blank=True, null=True, verbose_name='last login'),
),
]
|
Add important migration to Django 1.8
This migration makes `Person.last_login` NULL-able.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('workshops', '0004_merge'),
]
operations = [
migrations.AlterField(
model_name='person',
name='groups',
field=models.ManyToManyField(help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', blank=True, to='auth.Group', related_name='user_set', related_query_name='user', verbose_name='groups'),
),
migrations.AlterField(
model_name='person',
name='last_login',
field=models.DateTimeField(blank=True, null=True, verbose_name='last login'),
),
]
|
<commit_before><commit_msg>Add important migration to Django 1.8
This migration makes `Person.last_login` NULL-able.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('workshops', '0004_merge'),
]
operations = [
migrations.AlterField(
model_name='person',
name='groups',
field=models.ManyToManyField(help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', blank=True, to='auth.Group', related_name='user_set', related_query_name='user', verbose_name='groups'),
),
migrations.AlterField(
model_name='person',
name='last_login',
field=models.DateTimeField(blank=True, null=True, verbose_name='last login'),
),
]
|
|
d0403b168b45ae9dc3bd4d22a50e01a12273b8a2
|
test/test_datasource.py
|
test/test_datasource.py
|
"""
Test cases for the datasource classes.
:author: Martin Norbury (mnorbury@lcogt.net)
"""
import json
from nose.tools import eq_
from mock import patch
import datasource
class TestDataSource(object):
def __init__(self):
self.source = datasource.JenkinsDataSource('buildsba:8085')
@patch('datasource.urlopen')
def test_blue_if_all_builds_are_blue(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue', ])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_all_builds_are_yellow(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', ])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_all_builds_are_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', ])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_any_build_is_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_any_build_is_yellow_but_not_failed(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_blue_with_activity_if_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 1}, data)
@patch('datasource.urlopen')
    def test_blue_with_activity_if_all_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue_anime'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 2}, data)
def _configure_mock(mock_urlopen, colors):
mock_connection = mock_urlopen()
raw_data = {'jobs': [dict(color=color) for color in colors]}
mock_connection.read.return_value = json.dumps(raw_data).encode()
|
Test cases for the datasource class.
|
Test cases for the datasource class.
|
Python
|
mit
|
mnorbury/pyblink
|
Test cases for the datasource class.
|
"""
Test cases for the datasource classes.
:author: Martin Norbury (mnorbury@lcogt.net)
"""
import json
from nose.tools import eq_
from mock import patch
import datasource
class TestDataSource(object):
def __init__(self):
self.source = datasource.JenkinsDataSource('buildsba:8085')
@patch('datasource.urlopen')
def test_blue_if_all_builds_are_blue(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue', ])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_all_builds_are_yellow(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', ])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_all_builds_are_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', ])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_any_build_is_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_any_build_is_yellow_but_not_failed(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_blue_with_activity_if_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 1}, data)
@patch('datasource.urlopen')
    def test_blue_with_activity_if_all_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue_anime'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 2}, data)
def _configure_mock(mock_urlopen, colors):
mock_connection = mock_urlopen()
raw_data = {'jobs': [dict(color=color) for color in colors]}
mock_connection.read.return_value = json.dumps(raw_data).encode()
|
<commit_before><commit_msg>Test cases for the datasource class.<commit_after>
|
"""
Test cases for the datasource classes.
:author: Martin Norbury (mnorbury@lcogt.net)
"""
import json
from nose.tools import eq_
from mock import patch
import datasource
class TestDataSource(object):
def __init__(self):
self.source = datasource.JenkinsDataSource('buildsba:8085')
@patch('datasource.urlopen')
def test_blue_if_all_builds_are_blue(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue', ])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_all_builds_are_yellow(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', ])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_all_builds_are_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', ])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_any_build_is_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_any_build_is_yellow_but_not_failed(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_blue_with_activity_if_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 1}, data)
@patch('datasource.urlopen')
    def test_blue_with_activity_if_all_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue_anime'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 2}, data)
def _configure_mock(mock_urlopen, colors):
mock_connection = mock_urlopen()
raw_data = {'jobs': [dict(color=color) for color in colors]}
mock_connection.read.return_value = json.dumps(raw_data).encode()
|
Test cases for the datasource class."""
Test cases for the datasource classes.
:author: Martin Norbury (mnorbury@lcogt.net)
"""
import json
from nose.tools import eq_
from mock import patch
import datasource
class TestDataSource(object):
def __init__(self):
self.source = datasource.JenkinsDataSource('buildsba:8085')
@patch('datasource.urlopen')
def test_blue_if_all_builds_are_blue(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue', ])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_all_builds_are_yellow(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', ])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_all_builds_are_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', ])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_any_build_is_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_any_build_is_yellow_but_not_failed(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_blue_with_activity_if_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 1}, data)
@patch('datasource.urlopen')
    def test_blue_with_activity_if_all_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue_anime'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 2}, data)
def _configure_mock(mock_urlopen, colors):
mock_connection = mock_urlopen()
raw_data = {'jobs': [dict(color=color) for color in colors]}
mock_connection.read.return_value = json.dumps(raw_data).encode()
|
<commit_before><commit_msg>Test cases for the datasource class.<commit_after>"""
Test cases for the datasource classes.
:author: Martin Norbury (mnorbury@lcogt.net)
"""
import json
from nose.tools import eq_
from mock import patch
import datasource
class TestDataSource(object):
def __init__(self):
self.source = datasource.JenkinsDataSource('buildsba:8085')
@patch('datasource.urlopen')
def test_blue_if_all_builds_are_blue(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue', ])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_all_builds_are_yellow(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', ])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_all_builds_are_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', ])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_red_if_any_build_is_red(self, mock_urlopen):
_configure_mock(mock_urlopen, ['red', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'red', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_yellow_if_any_build_is_yellow_but_not_failed(self, mock_urlopen):
_configure_mock(mock_urlopen, ['yellow', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'yellow', 'activity': 0}, data)
@patch('datasource.urlopen')
def test_blue_with_activity_if_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 1}, data)
@patch('datasource.urlopen')
    def test_blue_with_activity_if_all_building(self, mock_urlopen):
_configure_mock(mock_urlopen, ['blue_anime', 'blue_anime'])
data = self.source.retrieve_data()
eq_({'color': 'blue', 'activity': 2}, data)
def _configure_mock(mock_urlopen, colors):
mock_connection = mock_urlopen()
raw_data = {'jobs': [dict(color=color) for color in colors]}
mock_connection.read.return_value = json.dumps(raw_data).encode()
|
|
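For orientation, here is a hedged reconstruction of the retrieve_data contract those tests pin down; the real datasource module may differ, and the /api/json path is a guess at the usual Jenkins endpoint:

import json
from urllib.request import urlopen  # the tests patch 'datasource.urlopen'

class JenkinsDataSource(object):
    def __init__(self, host):
        self._url = 'http://%s/api/json' % host

    def retrieve_data(self):
        raw = json.loads(urlopen(self._url).read().decode())
        colors = [job['color'] for job in raw['jobs']]
        # '_anime' marks an in-progress build; count it, then strip the
        # suffix before ranking colors.
        activity = sum(1 for c in colors if c.endswith('_anime'))
        base = [c.replace('_anime', '') for c in colors]
        if 'red' in base:
            color = 'red'          # any failed build wins
        elif 'yellow' in base:
            color = 'yellow'       # otherwise any unstable build wins
        else:
            color = 'blue'
        return {'color': color, 'activity': activity}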
675fbbad8917e2ce2d9395c47a42650bd0e4f697
|
tests/test_extensions.py
|
tests/test_extensions.py
|
"""Test suites for the extension management features."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import itertools
import pytest
from rpmvenv.extensions import loader
def test_loader_deterministic_order():
"""Test that the extensions are always loaded in requested order."""
extensions = ("file_permissions", "file_extras", "python_venv", "blocks")
for selection in itertools.permutations(extensions):
results = loader.load_extensions(selection)
results = (result.name for result in results if result.name != "core")
assert tuple(results) == selection
|
Add test to reproduce ordering issues
|
Add test to reproduce ordering issues
This is an attempt to reproduce an issue reported related to the
non-deterministic ordering of extensions.
|
Python
|
mit
|
kevinconway/rpmvenv
|
Add test to reproduce ordering issues
This is an attempt to reproduce an issue reported related to the
non-deterministic ordering of extensions.
|
"""Test suites for the extension management features."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import itertools
import pytest
from rpmvenv.extensions import loader
def test_loader_deterministic_order():
"""Test that the extensions are always loaded in requested order."""
extensions = ("file_permissions", "file_extras", "python_venv", "blocks")
for selection in itertools.permutations(extensions):
results = loader.load_extensions(selection)
results = (result.name for result in results if result.name != "core")
assert tuple(results) == selection
|
<commit_before><commit_msg>Add test to reproduce ordering issues
This is an attempt to reproduce an issue reported related to the
non-deterministic ordering of extensions.<commit_after>
|
"""Test suites for the extension management features."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import itertools
import pytest
from rpmvenv.extensions import loader
def test_loader_deterministic_order():
"""Test that the extensions are always loaded in requested order."""
extensions = ("file_permissions", "file_extras", "python_venv", "blocks")
for selection in itertools.permutations(extensions):
results = loader.load_extensions(selection)
results = (result.name for result in results if result.name != "core")
assert tuple(results) == selection
|
Add test to reproduce ordering issues
This is an attempt to reproduce an issue reported related to the
non-deterministic ordering of extensions."""Test suites for the extension management features."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import itertools
import pytest
from rpmvenv.extensions import loader
def test_loader_deterministic_order():
"""Test that the extensions are always loaded in requested order."""
extensions = ("file_permissions", "file_extras", "python_venv", "blocks")
for selection in itertools.permutations(extensions):
results = loader.load_extensions(selection)
results = (result.name for result in results if result.name != "core")
assert tuple(results) == selection
|
<commit_before><commit_msg>Add test to reproduce ordering issues
This is an attempt to reproduce an issue reported related to the
non-deterministic ordering of extensions.<commit_after>"""Test suites for the extension management features."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import itertools
import pytest
from rpmvenv.extensions import loader
def test_loader_deterministic_order():
"""Test that the extensions are always loaded in requested order."""
extensions = ("file_permissions", "file_extras", "python_venv", "blocks")
for selection in itertools.permutations(extensions):
results = loader.load_extensions(selection)
results = (result.name for result in results if result.name != "core")
assert tuple(results) == selection
|
|
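The permutation loop above encodes the fix's whole idea: determinism means the loader must honor the requested tuple's order rather than plugin-discovery order. A minimal illustration (the discover callable is a hypothetical stand-in for entry-point iteration):

from collections import namedtuple

Ext = namedtuple('Ext', 'name')

def load_extensions(requested, discover):
    # Index whatever discovery yields by name, then walk the *requested*
    # sequence so output order belongs to the caller, not the plugin system.
    by_name = {ext.name: ext for ext in discover()}
    return [by_name[name] for name in requested if name in by_name]

discover = lambda: (Ext('b'), Ext('a'), Ext('c'))  # discovery order is arbitrary
assert [e.name for e in load_extensions(('a', 'c', 'b'), discover)] == ['a', 'c', 'b']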
9759c4605199443f2331f4d2fbbd991cc737f7af
|
avalanche/evaluation/metrics/forward_transfer.py
|
avalanche/evaluation/metrics/forward_transfer.py
|
################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 4-02-2021 #
# Author(s): Ryan Lindeborg #
# E-mail: contact@continualai.org #
# Website: avalanche.continualai.org #
################################################################################
from typing import Dict, Union
from avalanche.evaluation.metric_definitions import Metric
class ForwardTransfer(Metric[Union[float, None, Dict[int, float]]]):
"""
The standalone Forward Transfer metric.
This metric returns the forward transfer relative to a specific key.
Alternatively, this metric returns a dict in which each key is associated
to the forward transfer.
    Forward transfer is computed as the difference between the value recorded for a specific key after training on the immediately preceding task and the value at the model's random initialization, before any training.
    The value associated with a key can be updated with the `update` method.
At initialization, this metric returns an empty dictionary.
"""
def __init__(self):
"""
Creates an instance of the standalone Forward Transfer metric
"""
super().__init__()
self.initial: Dict[int, float] = dict()
"""
The initial value for each key. This is the accuracy at random initialization.
"""
self.previous: Dict[int, float] = dict()
"""
The previous task value detected for each key
"""
def update_initial(self, k, v):
self.initial[k] = v
def update_previous(self, k, v):
self.previous[k] = v
def update(self, k, v, initial=False):
if initial:
self.update_initial(k, v)
else:
self.update_previous(k, v)
def result(self, k=None) -> Union[float, None, Dict[int, float]]:
"""
Forward transfer is not returned for the last task.
        :param k: the key for which to return the forward transfer. If k is None,
            forward transfer will be returned for all keys except the last one.
        :return: the difference between the key's previous-task value and its value at random initialization.
"""
forward_transfer = {}
if k is not None:
if k in self.previous:
return self.previous[k] - self.initial[k]
else:
return None
for k in self.previous.keys():
forward_transfer[k] = self.previous[k] - self.initial[k]
return forward_transfer
def reset_previous(self) -> None:
self.previous: Dict[int, float] = dict()
def reset(self) -> None:
self.initial: Dict[int, float] = dict()
self.previous: Dict[int, float] = dict()
|
Set up base class for Forward Transfer metric
|
Set up base class for Forward Transfer metric
|
Python
|
mit
|
ContinualAI/avalanche,ContinualAI/avalanche
|
Set up base class for Forward Transfer metric
|
################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 4-02-2021 #
# Author(s): Ryan Lindeborg #
# E-mail: contact@continualai.org #
# Website: avalanche.continualai.org #
################################################################################
from typing import Dict, Union
from avalanche.evaluation.metric_definitions import Metric
class ForwardTransfer(Metric[Union[float, None, Dict[int, float]]]):
"""
The standalone Forward Transfer metric.
This metric returns the forward transfer relative to a specific key.
Alternatively, this metric returns a dict in which each key is associated
to the forward transfer.
    Forward transfer is computed as the difference between the value recorded for a specific key after training on the immediately preceding task and the value at the model's random initialization, before any training.
    The value associated with a key can be updated with the `update` method.
At initialization, this metric returns an empty dictionary.
"""
def __init__(self):
"""
Creates an instance of the standalone Forward Transfer metric
"""
super().__init__()
self.initial: Dict[int, float] = dict()
"""
The initial value for each key. This is the accuracy at random initialization.
"""
self.previous: Dict[int, float] = dict()
"""
The previous task value detected for each key
"""
def update_initial(self, k, v):
self.initial[k] = v
def update_previous(self, k, v):
self.previous[k] = v
def update(self, k, v, initial=False):
if initial:
self.update_initial(k, v)
else:
self.update_previous(k, v)
def result(self, k=None) -> Union[float, None, Dict[int, float]]:
"""
Forward transfer is not returned for the last task.
        :param k: the key for which to return the forward transfer. If k is None,
            forward transfer will be returned for all keys except the last one.
        :return: the difference between the key's previous-task value and its value at random initialization.
"""
forward_transfer = {}
if k is not None:
if k in self.previous:
return self.previous[k] - self.initial[k]
else:
return None
for k in self.previous.keys():
forward_transfer[k] = self.previous[k] - self.initial[k]
return forward_transfer
def reset_previous(self) -> None:
self.previous: Dict[int, float] = dict()
def reset(self) -> None:
self.initial: Dict[int, float] = dict()
self.previous: Dict[int, float] = dict()
|
<commit_before><commit_msg>Set up base class for Forward Transfer metric<commit_after>
|
################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 4-02-2021 #
# Author(s): Ryan Lindeborg #
# E-mail: contact@continualai.org #
# Website: avalanche.continualai.org #
################################################################################
from typing import Dict, Union
from avalanche.evaluation.metric_definitions import Metric
class ForwardTransfer(Metric[Union[float, None, Dict[int, float]]]):
"""
The standalone Forward Transfer metric.
This metric returns the forward transfer relative to a specific key.
Alternatively, this metric returns a dict in which each key is associated
to the forward transfer.
    Forward transfer is computed as the difference between the value recorded for a specific key after training on the immediately preceding task and the value at the model's random initialization, before any training.
    The value associated with a key can be updated with the `update` method.
At initialization, this metric returns an empty dictionary.
"""
def __init__(self):
"""
Creates an instance of the standalone Forward Transfer metric
"""
super().__init__()
self.initial: Dict[int, float] = dict()
"""
The initial value for each key. This is the accuracy at random initialization.
"""
self.previous: Dict[int, float] = dict()
"""
The previous task value detected for each key
"""
def update_initial(self, k, v):
self.initial[k] = v
def update_previous(self, k, v):
self.previous[k] = v
def update(self, k, v, initial=False):
if initial:
self.update_initial(k, v)
else:
self.update_previous(k, v)
def result(self, k=None) -> Union[float, None, Dict[int, float]]:
"""
Forward transfer is not returned for the last task.
        :param k: the key for which to return the forward transfer. If k is None,
            forward transfer will be returned for all keys except the last one.
        :return: the difference between the key's previous-task value and its value at random initialization.
"""
forward_transfer = {}
if k is not None:
if k in self.previous:
return self.previous[k] - self.initial[k]
else:
return None
for k in self.previous.keys():
forward_transfer[k] = self.previous[k] - self.initial[k]
return forward_transfer
def reset_previous(self) -> None:
self.previous: Dict[int, float] = dict()
def reset(self) -> None:
self.initial: Dict[int, float] = dict()
self.previous: Dict[int, float] = dict()
|
Set up base class for Forward Transfer metric################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 4-02-2021 #
# Author(s): Ryan Lindeborg #
# E-mail: contact@continualai.org #
# Website: avalanche.continualai.org #
################################################################################
from typing import Dict, Union
from avalanche.evaluation.metric_definitions import Metric
class ForwardTransfer(Metric[Union[float, None, Dict[int, float]]]):
"""
The standalone Forward Transfer metric.
This metric returns the forward transfer relative to a specific key.
Alternatively, this metric returns a dict in which each key is associated
to the forward transfer.
    Forward transfer is computed as the difference between the value recorded for a specific key after training on the immediately preceding task and the value at the model's random initialization, before any training.
    The value associated with a key can be updated with the `update` method.
At initialization, this metric returns an empty dictionary.
"""
def __init__(self):
"""
Creates an instance of the standalone Forward Transfer metric
"""
super().__init__()
self.initial: Dict[int, float] = dict()
"""
The initial value for each key. This is the accuracy at random initialization.
"""
self.previous: Dict[int, float] = dict()
"""
The previous task value detected for each key
"""
def update_initial(self, k, v):
self.initial[k] = v
def update_previous(self, k, v):
self.previous[k] = v
def update(self, k, v, initial=False):
if initial:
self.update_initial(k, v)
else:
self.update_previous(k, v)
def result(self, k=None) -> Union[float, None, Dict[int, float]]:
"""
Forward transfer is not returned for the last task.
        :param k: the key for which to return the forward transfer. If k is None,
            forward transfer will be returned for all keys except the last one.
        :return: the difference between the key's previous-task value and its value at random initialization.
"""
forward_transfer = {}
if k is not None:
if k in self.previous:
return self.previous[k] - self.initial[k]
else:
return None
for k in self.previous.keys():
forward_transfer[k] = self.previous[k] - self.initial[k]
return forward_transfer
def reset_previous(self) -> None:
self.previous: Dict[int, float] = dict()
def reset(self) -> None:
self.initial: Dict[int, float] = dict()
self.previous: Dict[int, float] = dict()
|
<commit_before><commit_msg>Set up base class for Forward Transfer metric<commit_after>################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 4-02-2021 #
# Author(s): Ryan Lindeborg #
# E-mail: contact@continualai.org #
# Website: avalanche.continualai.org #
################################################################################
from typing import Dict, Union
from avalanche.evaluation.metric_definitions import Metric
class ForwardTransfer(Metric[Union[float, None, Dict[int, float]]]):
"""
The standalone Forward Transfer metric.
This metric returns the forward transfer relative to a specific key.
Alternatively, this metric returns a dict in which each key is associated
to the forward transfer.
    Forward transfer is computed as the difference between the value recorded for a specific key after training on the immediately preceding task and the value at the model's random initialization, before any training.
    The value associated with a key can be updated with the `update` method.
At initialization, this metric returns an empty dictionary.
"""
def __init__(self):
"""
Creates an instance of the standalone Forward Transfer metric
"""
super().__init__()
self.initial: Dict[int, float] = dict()
"""
The initial value for each key. This is the accuracy at random initialization.
"""
self.previous: Dict[int, float] = dict()
"""
The previous task value detected for each key
"""
def update_initial(self, k, v):
self.initial[k] = v
def update_previous(self, k, v):
self.previous[k] = v
def update(self, k, v, initial=False):
if initial:
self.update_initial(k, v)
else:
self.update_previous(k, v)
def result(self, k=None) -> Union[float, None, Dict[int, float]]:
"""
Forward transfer is not returned for the last task.
        :param k: the key for which to return the forward transfer. If k is None,
            forward transfer will be returned for all keys except the last one.
        :return: the difference between the key's previous-task value and its value at random initialization.
"""
forward_transfer = {}
if k is not None:
if k in self.previous:
return self.previous[k] - self.initial[k]
else:
return None
for k in self.previous.keys():
forward_transfer[k] = self.previous[k] - self.initial[k]
return forward_transfer
def reset_previous(self) -> None:
self.previous: Dict[int, float] = dict()
def reset(self) -> None:
self.initial: Dict[int, float] = dict()
self.previous: Dict[int, float] = dict()
|
|
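A short usage sketch of the metric above, with made-up accuracy values: record the key once at random initialization and once after training on the preceding task, and result() reports the difference.

metric = ForwardTransfer()
metric.update(0, 0.20, initial=True)   # accuracy at random initialization
metric.update(0, 0.65)                 # accuracy after the preceding task
assert abs(metric.result(0) - 0.45) < 1e-9
assert metric.result() == {0: 0.65 - 0.20}
metric.reset()
assert metric.result() == {}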
e98a15c3277d86ea8b588abcfbbcaec5eea5e9b8
|
test/test_spacelinks.py
|
test/test_spacelinks.py
|
import pytest
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
environ = {
'tiddlyweb.config': {
'markdown.wiki_link_base': '',
'server_host': {
'host': 'tiddlyspace.org',
'port': '8080',
'scheme': 'http',
}
}
}
def test_simple_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit @cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_escaped_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit ~@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' not in output
assert '@cdent' in output
def test_spacelink_first():
tiddler = Tiddler('test')
tiddler.text = '@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
tiddler.text = '\n\n@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_spacewiki_link():
tiddler = Tiddler('test')
tiddler.text = "This is WikiLink@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/WikiLink">WikiLink</a>' in output
def test_spacefree_link():
tiddler = Tiddler('test')
tiddler.text = "This is [[Free Link]]@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/Free%20Link">Free Link</a>' in output
|
Add the tests that drove the previous commit.
|
Add the tests that drove the previous commit.
|
Python
|
bsd-2-clause
|
tiddlyweb/tiddlywebplugins.markdown
|
Add the tests that drove the previous commit.
|
import pytest
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
environ = {
'tiddlyweb.config': {
'markdown.wiki_link_base': '',
'server_host': {
'host': 'tiddlyspace.org',
'port': '8080',
'scheme': 'http',
}
}
}
def test_simple_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit @cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_escaped_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit ~@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' not in output
assert '@cdent' in output
def test_spacelink_first():
tiddler = Tiddler('test')
tiddler.text = '@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
tiddler.text = '\n\n@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_spacewiki_link():
tiddler = Tiddler('test')
tiddler.text = "This is WikiLink@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/WikiLink">WikiLink</a>' in output
def test_spacefree_link():
tiddler = Tiddler('test')
tiddler.text = "This is [[Free Link]]@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/Free%20Link">Free Link</a>' in output
|
<commit_before><commit_msg>Add the tests that drove the previous commit.<commit_after>
|
import pytest
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
environ = {
'tiddlyweb.config': {
'markdown.wiki_link_base': '',
'server_host': {
'host': 'tiddlyspace.org',
'port': '8080',
'scheme': 'http',
}
}
}
def test_simple_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit @cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_escaped_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit ~@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' not in output
assert '@cdent' in output
def test_spacelink_first():
tiddler = Tiddler('test')
tiddler.text = '@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
tiddler.text = '\n\n@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_spacewiki_link():
tiddler = Tiddler('test')
tiddler.text = "This is WikiLink@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/WikiLink">WikiLink</a>' in output
def test_spacefree_link():
tiddler = Tiddler('test')
tiddler.text = "This is [[Free Link]]@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/Free%20Link">Free Link</a>' in output
|
Add the tests that drove the previous commit.
import pytest
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
environ = {
'tiddlyweb.config': {
'markdown.wiki_link_base': '',
'server_host': {
'host': 'tiddlyspace.org',
'port': '8080',
'scheme': 'http',
}
}
}
def test_simple_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit @cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_escaped_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit ~@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' not in output
assert '@cdent' in output
def test_spacelink_first():
tiddler = Tiddler('test')
tiddler.text = '@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
tiddler.text = '\n\n@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_spacewiki_link():
tiddler = Tiddler('test')
tiddler.text = "This is WikiLink@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/WikiLink">WikiLink</a>' in output
def test_spacefree_link():
tiddler = Tiddler('test')
tiddler.text = "This is [[Free Link]]@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/Free%20Link">Free Link</a>' in output
|
<commit_before><commit_msg>Add the tests that drove the previous commit.<commit_after>
import pytest
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
environ = {
'tiddlyweb.config': {
'markdown.wiki_link_base': '',
'server_host': {
'host': 'tiddlyspace.org',
'port': '8080',
'scheme': 'http',
}
}
}
def test_simple_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit @cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_escaped_spacelink():
tiddler = Tiddler('test')
tiddler.text = '# Hi\nVisit ~@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' not in output
assert '@cdent' in output
def test_spacelink_first():
tiddler = Tiddler('test')
tiddler.text = '@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
tiddler.text = '\n\n@cdent for more info.'
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/">@cdent</a>' in output
def test_spacewiki_link():
tiddler = Tiddler('test')
tiddler.text = "This is WikiLink@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/WikiLink">WikiLink</a>' in output
def test_spacefree_link():
tiddler = Tiddler('test')
tiddler.text = "This is [[Free Link]]@cdent"
output = render(tiddler, environ)
assert '<a href="http://cdent.tiddlyspace.org:8080/Free%20Link">Free Link</a>' in output
|
|
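For the curious, the bare '@space' cases those tests cover reduce to one substitution; this is an illustrative sketch only, since the plugin's real renderer also handles WikiLink@space and [[Free Link]]@space, which this does not:

import re

def link_spaces(text, scheme='http', host='tiddlyspace.org', port='8080'):
    def replace(match):
        if match.group(1):  # a leading '~' escapes the mention
            return '@' + match.group(2)
        space = match.group(2)
        return '<a href="%s://%s.%s:%s/">@%s</a>' % (scheme, space, host, port, space)
    return re.sub(r'(~?)@(\w+)', replace, text)

assert 'href="http://cdent.tiddlyspace.org:8080/"' in link_spaces('Visit @cdent.')
assert link_spaces('Visit ~@cdent.') == 'Visit @cdent.'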
c808e41ac7e61fe9aabfb6b7694453bcb903fab4
|
py/palindromic-substrings.py
|
py/palindromic-substrings.py
|
class Solution(object):
def countSubstrings(self, s):
"""
:type s: str
:rtype: int
"""
P = 10 ** 9 + 7
Q = 65535
inv = lambda x: pow(x % P, P - 2, P)
ls = len(s)
ans = 0
Q_inv = inv(Q)
for lp in xrange(1, ls + 1):
lrolling, rrolling = 0, 0
for i in xrange(lp):
losi = ord(s[i])
rosi = ord(s[lp - 1 - i])
lrolling = (lrolling * Q + losi) % P
rrolling = (rrolling * Q + rosi) % P
Q_lp = pow(Q, lp, P)
for i in xrange(lp, ls):
if lrolling == rrolling:
ans += 1
losi = ord(s[i - lp])
rosi = ord(s[i])
lrolling = (lrolling * Q + rosi + ((P - losi) * Q_lp)) % P
rrolling = ((rrolling + P - losi + rosi * Q_lp) * Q_inv) % P
if lrolling == rrolling:
ans += 1
return ans
|
Add py solution for 647. Palindromic Substrings
|
Add py solution for 647. Palindromic Substrings
647. Palindromic Substrings: https://leetcode.com/problems/palindromic-substrings/
Approach:
O(n^2) with Rolling hash: https://en.wikipedia.org/wiki/Rolling_hash
It's kind of overkill though...
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 647. Palindromic Substrings
647. Palindromic Substrings: https://leetcode.com/problems/palindromic-substrings/
Approach:
O(n^2) with Rolling hash: https://en.wikipedia.org/wiki/Rolling_hash
It's kind of overkill though...
|
class Solution(object):
def countSubstrings(self, s):
"""
:type s: str
:rtype: int
"""
P = 10 ** 9 + 7
Q = 65535
inv = lambda x: pow(x % P, P - 2, P)
ls = len(s)
ans = 0
Q_inv = inv(Q)
for lp in xrange(1, ls + 1):
lrolling, rrolling = 0, 0
for i in xrange(lp):
losi = ord(s[i])
rosi = ord(s[lp - 1 - i])
lrolling = (lrolling * Q + losi) % P
rrolling = (rrolling * Q + rosi) % P
Q_lp = pow(Q, lp, P)
for i in xrange(lp, ls):
if lrolling == rrolling:
ans += 1
losi = ord(s[i - lp])
rosi = ord(s[i])
lrolling = (lrolling * Q + rosi + ((P - losi) * Q_lp)) % P
rrolling = ((rrolling + P - losi + rosi * Q_lp) * Q_inv) % P
if lrolling == rrolling:
ans += 1
return ans
|
<commit_before><commit_msg>Add py solution for 647. Palindromic Substrings
647. Palindromic Substrings: https://leetcode.com/problems/palindromic-substrings/
Approach:
O(n^2) with Rolling hash: https://en.wikipedia.org/wiki/Rolling_hash
It's kind of overkill though...<commit_after>
|
class Solution(object):
def countSubstrings(self, s):
"""
:type s: str
:rtype: int
"""
P = 10 ** 9 + 7
Q = 65535
inv = lambda x: pow(x % P, P - 2, P)
ls = len(s)
ans = 0
Q_inv = inv(Q)
for lp in xrange(1, ls + 1):
lrolling, rrolling = 0, 0
for i in xrange(lp):
losi = ord(s[i])
rosi = ord(s[lp - 1 - i])
lrolling = (lrolling * Q + losi) % P
rrolling = (rrolling * Q + rosi) % P
Q_lp = pow(Q, lp, P)
for i in xrange(lp, ls):
if lrolling == rrolling:
ans += 1
losi = ord(s[i - lp])
rosi = ord(s[i])
lrolling = (lrolling * Q + rosi + ((P - losi) * Q_lp)) % P
rrolling = ((rrolling + P - losi + rosi * Q_lp) * Q_inv) % P
if lrolling == rrolling:
ans += 1
return ans
|
Add py solution for 647. Palindromic Substrings
647. Palindromic Substrings: https://leetcode.com/problems/palindromic-substrings/
Approach:
O(n^2) with Rolling hash: https://en.wikipedia.org/wiki/Rolling_hash
It's kind of overkill though...class Solution(object):
def countSubstrings(self, s):
"""
:type s: str
:rtype: int
"""
P = 10 ** 9 + 7
Q = 65535
inv = lambda x: pow(x % P, P - 2, P)
ls = len(s)
ans = 0
Q_inv = inv(Q)
for lp in xrange(1, ls + 1):
lrolling, rrolling = 0, 0
for i in xrange(lp):
losi = ord(s[i])
rosi = ord(s[lp - 1 - i])
lrolling = (lrolling * Q + losi) % P
rrolling = (rrolling * Q + rosi) % P
Q_lp = pow(Q, lp, P)
for i in xrange(lp, ls):
if lrolling == rrolling:
ans += 1
losi = ord(s[i - lp])
rosi = ord(s[i])
lrolling = (lrolling * Q + rosi + ((P - losi) * Q_lp)) % P
rrolling = ((rrolling + P - losi + rosi * Q_lp) * Q_inv) % P
if lrolling == rrolling:
ans += 1
return ans
|
<commit_before><commit_msg>Add py solution for 647. Palindromic Substrings
647. Palindromic Substrings: https://leetcode.com/problems/palindromic-substrings/
Approach:
O(n^2) with Rolling hash: https://en.wikipedia.org/wiki/Rolling_hash
It's kind of overkill though...<commit_after>class Solution(object):
def countSubstrings(self, s):
"""
:type s: str
:rtype: int
"""
P = 10 ** 9 + 7
Q = 65535
inv = lambda x: pow(x % P, P - 2, P)
ls = len(s)
ans = 0
Q_inv = inv(Q)
for lp in xrange(1, ls + 1):
lrolling, rrolling = 0, 0
for i in xrange(lp):
losi = ord(s[i])
rosi = ord(s[lp - 1 - i])
lrolling = (lrolling * Q + losi) % P
rrolling = (rrolling * Q + rosi) % P
Q_lp = pow(Q, lp, P)
for i in xrange(lp, ls):
if lrolling == rrolling:
ans += 1
losi = ord(s[i - lp])
rosi = ord(s[i])
lrolling = (lrolling * Q + rosi + ((P - losi) * Q_lp)) % P
rrolling = ((rrolling + P - losi + rosi * Q_lp) * Q_inv) % P
if lrolling == rrolling:
ans += 1
return ans
|
|
f21017d5d611b7dda9db52eb80f1a9cdb70fcac5
|
cms/djangoapps/contentstore/migrations/0004_remove_push_notification_configmodel_table.py
|
cms/djangoapps/contentstore/migrations/0004_remove_push_notification_configmodel_table.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-07-26 20:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('contentstore', '0003_remove_assets_page_flag'),
]
operations = [
migrations.RemoveField(
model_name='pushnotificationconfig',
name='changed_by',
),
migrations.DeleteModel(
name='PushNotificationConfig',
),
]
|
Remove the configmodel for push notifications.
|
Remove the configmodel for push notifications.
This feature was used to enable/disable push notifications using
Parse.com. That service died a few years ago but this was never cleaned
up. Cleaning it up as a part of DEPR-41 now that we've removed the code
that references this model.
|
Python
|
agpl-3.0
|
eduNEXT/edunext-platform,mitocw/edx-platform,cpennington/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,stvstnfrd/edx-platform,ESOedX/edx-platform,arbrandes/edx-platform,EDUlib/edx-platform,appsembler/edx-platform,edx-solutions/edx-platform,msegado/edx-platform,appsembler/edx-platform,edx-solutions/edx-platform,angelapper/edx-platform,stvstnfrd/edx-platform,cpennington/edx-platform,ESOedX/edx-platform,cpennington/edx-platform,appsembler/edx-platform,edx/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,mitocw/edx-platform,edx/edx-platform,stvstnfrd/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,cpennington/edx-platform,angelapper/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,edx-solutions/edx-platform,ESOedX/edx-platform,edx/edx-platform,edx/edx-platform,msegado/edx-platform,mitocw/edx-platform,mitocw/edx-platform,arbrandes/edx-platform,msegado/edx-platform,appsembler/edx-platform
|
Remove the configmodel for push notifications.
This feature was used to enable/disable push notifications using
Parse.com. That service died a few years ago but this was never cleaned
up. Cleaning it up as a part of DEPR-41 now that we've removed the code
that references this model.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-07-26 20:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('contentstore', '0003_remove_assets_page_flag'),
]
operations = [
migrations.RemoveField(
model_name='pushnotificationconfig',
name='changed_by',
),
migrations.DeleteModel(
name='PushNotificationConfig',
),
]
|
<commit_before><commit_msg>Remove the configmodel for push notifications.
This feature was used to enable/disable push notifications using
Parse.com. That service died a few years ago but this was never cleaned
up. Cleaning it up as a part of DEPR-41 now that we've removed the code
that references this model.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-07-26 20:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('contentstore', '0003_remove_assets_page_flag'),
]
operations = [
migrations.RemoveField(
model_name='pushnotificationconfig',
name='changed_by',
),
migrations.DeleteModel(
name='PushNotificationConfig',
),
]
|
Remove the configmodel for push notifications.
This feature was used to enable/disable push notifications using
Parse.com. That service died a few years ago but this was never cleaned
up. Cleaning it up as a part of DEPR-41 now that we've removed the code
that references this model.# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-07-26 20:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('contentstore', '0003_remove_assets_page_flag'),
]
operations = [
migrations.RemoveField(
model_name='pushnotificationconfig',
name='changed_by',
),
migrations.DeleteModel(
name='PushNotificationConfig',
),
]
|
<commit_before><commit_msg>Remove the configmodel for push notifications.
This feature was used to enable/disable push notifications using
Parse.com. That service died a few years ago but this was never cleaned
up. Cleaning it up as a part of DEPR-41 now that we've removed the code
that references this model.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-07-26 20:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('contentstore', '0003_remove_assets_page_flag'),
]
operations = [
migrations.RemoveField(
model_name='pushnotificationconfig',
name='changed_by',
),
migrations.DeleteModel(
name='PushNotificationConfig',
),
]
|
|
6b074c25fb4d0e93268f4f2bf5a1830c6df73ab8
|
tests/make_epoch_dow.py
|
tests/make_epoch_dow.py
|
#!/usr/bin/env python
"""
Program to compute the day of week for the epoch used.
"""
import datetime
print('// Day of week calculation needs to know the starting condition')
for n in range(0, 28, 4): # Limit to multiples of 4 since that limit is in the C code
year = 1970 + n
d = datetime.datetime(year, 1, 1)
if n:
conditional = '#elif'
else:
conditional = '#if'
print('%s (RTX_EPOCH - 1970) %% 28 == %d' % (conditional, n))
print(d.strftime('const uint8_t RTCx::epochDow = %w; // %A'))
print('#endif')
|
Add Python program to calculate the day of week at start of epoch
|
Add Python program to calculate the day of week at start of epoch
|
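A quick sanity check of the `% 28` assumption in the script above (illustrative, not part of the commit): between 1901 and 2099 every fourth year is a leap year, so the Gregorian calendar repeats with period 28 and Jan 1 of year and year+28 land on the same weekday; 2100 is not a leap year, which is why the cycle only holds for epochs in that window.

import datetime

# year + 28 stays below 2100, inside the exception-free range
for year in range(1970, 2071):
    a = datetime.date(year, 1, 1).weekday()
    b = datetime.date(year + 28, 1, 1).weekday()
    assert a == b, year
print("Jan 1 weekday repeats every 28 years in this range")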
Python
|
lgpl-2.1
|
stevemarple/RTCx,stevemarple/RTCx
|
Add Python program to calculate the day of week at start of epoch
|
#!/usr/bin/env python
"""
Program to compute the day of week for the epoch used.
"""
import datetime
print('// Day of week calculation needs to know the starting condition')
for n in range(0, 28, 4): # Limit to multiples of 4 since that limit is in the C code
year = 1970 + n
d = datetime.datetime(year, 1, 1)
if n:
conditional = '#elif'
else:
conditional = '#if'
print('%s (RTX_EPOCH - 1970) %% 28 == %d' % (conditional, n))
print(d.strftime('const uint8_t RTCx::epochDow = %w; // %A'))
print('#endif')
|
<commit_before><commit_msg>Add Python program to calculate the day of week at start of epoch<commit_after>
|
#!/usr/bin/env python
"""
Program to compute the day of week for the epoch used.
"""
import datetime
print('// Day of week calculation needs to know the starting condition')
for n in range(0, 28, 4): # Limit to multiples of 4 since that limit is in the C code
year = 1970 + n
d = datetime.datetime(year, 1, 1)
if n:
conditional = '#elif'
else:
conditional = '#if'
print('%s (RTX_EPOCH - 1970) %% 28 == %d' % (conditional, n))
print(d.strftime('const uint8_t RTCx::epochDow = %w; // %A'))
print('#endif')
|
Add Python program to calculate the day of week at start of epoch#!/usr/bin/env python
"""
Program to compute the day of week for the epoch used.
"""
import datetime
print('// Day of week calculation needs to know the starting condition')
for n in range(0, 28, 4): # Limit to multiples of 4 since that limit is in the C code
year = 1970 + n
d = datetime.datetime(year, 1, 1)
if n:
conditional = '#elif'
else:
conditional = '#if'
print('%s (RTX_EPOCH - 1970) %% 28 == %d' % (conditional, n))
print(d.strftime('const uint8_t RTCx::epochDow = %w; // %A'))
print('#endif')
|
<commit_before><commit_msg>Add Python program to calculate the day of week at start of epoch<commit_after>#!/usr/bin/env python
"""
Program to compute the day of week for the epoch used.
"""
import datetime
print('// Day of week calculation needs to know the starting condition')
for n in range(0, 28, 4): # Limit to multiples of 4 since that limit is in the C code
year = 1970 + n
d = datetime.datetime(year, 1, 1)
if n:
conditional = '#elif'
else:
conditional = '#if'
print('%s (RTX_EPOCH - 1970) %% 28 == %d' % (conditional, n))
print(d.strftime('const uint8_t RTCx::epochDow = %w; // %A'))
print('#endif')
|
|
3c31c48d6b23b639c750eb63f93a395ee8a66c92
|
usr/examples/test_all.py
|
usr/examples/test_all.py
|
import sensor, time
# Set sensor gainceiling
sensor.set_gainceiling(16)
# Set sensor brightness
sensor.set_brightness(-2)
# Set sensor to grayscale
sensor.set_pixformat(sensor.GRAYSCALE)
def test_surf(sensor):
clock = time.clock()
for x in range(100):
image = sensor.snapshot()
clock.tick()
kp = image.find_keypoints(upright=False, thresh=0.0004, octaves=2)
image.draw_keypoints(kp)
print (clock.avg())
def test_haar(sensor):
# Load Haar Cascade
cascade = HaarCascade("0:/frontalface_default.cascade")
print(cascade)
clock = time.clock()
for x in range(100):
clock.tick()
image = sensor.snapshot()
objects = image.find_features(cascade)
for r in objects:
image.draw_rectangle(r)
print (clock.fps())
while (True):
test_surf(sensor)
test_haar(sensor)
|
Add script to do combined tests
|
Add script to do combined tests
|
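The script above relies on OpenMV's clock.tick()/clock.avg()/clock.fps() idiom for frame timing. A rough desktop-Python stand-in for that idiom (a sketch only: FrameClock is an invented name, plain time.monotonic() replaces the OpenMV clock object, and busy work replaces the camera calls):

import time

class FrameClock:
    # tick() marks a frame boundary; avg() is mean ms/frame; fps() the rate
    def __init__(self):
        self._last = None
        self._total_ms = 0.0
        self._frames = 0

    def tick(self):
        now = time.monotonic()
        if self._last is not None:
            self._total_ms += (now - self._last) * 1000.0
            self._frames += 1
        self._last = now

    def avg(self):
        return self._total_ms / self._frames if self._frames else 0.0

    def fps(self):
        a = self.avg()
        return 1000.0 / a if a else 0.0

clock = FrameClock()
for _ in range(100):
    clock.tick()
    sum(range(10000))  # stand-in for snapshot + feature detection
print(clock.avg(), clock.fps())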
Python
|
mit
|
tianzhihen/openmv,SmartArduino/openmv,openmv/openmv,SmartArduino/openmv,openmv/openmv,kwagyeman/openmv,kwagyeman/openmv,iabdalkader/openmv,iabdalkader/openmv,iabdalkader/openmv,tianzhihen/openmv,SmartArduino/openmv,kwagyeman/openmv,SmartArduino/openmv,kwagyeman/openmv,openmv/openmv,tianzhihen/openmv,openmv/openmv,iabdalkader/openmv,tianzhihen/openmv
|
Add script to do combined tests
|
import sensor, time
# Set sensor gainceiling
sensor.set_gainceiling(16)
# Set sensor brightness
sensor.set_brightness(-2)
# Set sensor to grayscale
sensor.set_pixformat(sensor.GRAYSCALE)
def test_surf(sensor):
clock = time.clock()
for x in range(100):
image = sensor.snapshot()
clock.tick()
kp = image.find_keypoints(upright=False, thresh=0.0004, octaves=2)
image.draw_keypoints(kp)
print (clock.avg())
def test_haar(sensor):
# Load Haar Cascade
cascade = HaarCascade("0:/frontalface_default.cascade")
print(cascade)
clock = time.clock()
for x in range(100):
clock.tick()
image = sensor.snapshot()
objects = image.find_features(cascade)
for r in objects:
image.draw_rectangle(r)
print (clock.fps())
while (True):
test_surf(sensor)
test_haar(sensor)
|
<commit_before><commit_msg>Add script to do combined tests<commit_after>
|
import sensor, time
# Set sensor gainceiling
sensor.set_gainceiling(16)
# Set sensor brightness
sensor.set_brightness(-2)
# Set sensor to grayscale
sensor.set_pixformat(sensor.GRAYSCALE)
def test_surf(sensor):
clock = time.clock()
for x in range(100):
image = sensor.snapshot()
clock.tick()
kp = image.find_keypoints(upright=False, thresh=0.0004, octaves=2)
image.draw_keypoints(kp)
print (clock.avg())
def test_haar(sensor):
# Load Haar Cascade
cascade = HaarCascade("0:/frontalface_default.cascade")
print(cascade)
clock = time.clock()
for x in range(100):
clock.tick()
image = sensor.snapshot()
objects = image.find_features(cascade)
for r in objects:
image.draw_rectangle(r)
print (clock.fps())
while (True):
test_surf(sensor)
test_haar(sensor)
|
Add script to do combined testsimport sensor, time
# Set sensor gainceiling
sensor.set_gainceiling(16)
# Set sensor brightness
sensor.set_brightness(-2)
# Set sensor to grayscale
sensor.set_pixformat(sensor.GRAYSCALE)
def test_surf(sensor):
clock = time.clock()
for x in range(100):
image = sensor.snapshot()
clock.tick()
kp = image.find_keypoints(upright=False, thresh=0.0004, octaves=2)
image.draw_keypoints(kp)
print (clock.avg())
def test_haar(sensor):
# Load Haar Cascade
cascade = HaarCascade("0:/frontalface_default.cascade")
print(cascade)
clock = time.clock()
for x in range(100):
clock.tick()
image = sensor.snapshot()
objects = image.find_features(cascade)
for r in objects:
image.draw_rectangle(r)
print (clock.fps())
while (True):
test_surf(sensor)
test_haar(sensor)
|
<commit_before><commit_msg>Add script to do combined tests<commit_after>import sensor, time
# Set sensor gainceiling
sensor.set_gainceiling(16)
# Set sensor brightness
sensor.set_brightness(-2)
# Set sensor to grayscale
sensor.set_pixformat(sensor.GRAYSCALE)
def test_surf(sensor):
clock = time.clock()
for x in range(100):
image = sensor.snapshot()
clock.tick()
kp = image.find_keypoints(upright=False, thresh=0.0004, octaves=2)
image.draw_keypoints(kp)
print (clock.avg())
def test_haar(sensor):
# Load Haar Cascade
cascade = HaarCascade("0:/frontalface_default.cascade")
print(cascade)
clock = time.clock()
for x in range(100):
clock.tick()
image = sensor.snapshot()
objects = image.find_features(cascade)
for r in objects:
image.draw_rectangle(r)
print (clock.fps())
while (True):
test_surf(sensor)
test_haar(sensor)
|
|
e105b97e7cdbbb6b78f94c6683c82a9e9932c85c
|
benchexec/tools/lctd.py
|
benchexec/tools/lctd.py
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
Add wrapper script for the LCTD verification tool
|
Add wrapper script for the LCTD verification tool
|
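The wrapper's determine_result reduces the verifier's textual output to a verdict by scanning for TRUE/FALSE lines. A standalone sketch of that classification logic (illustration only: the constants are placeholders for the real values in benchexec.result, and the sample outputs are made up):

RESULT_TRUE_PROP = "true"             # placeholder for result.RESULT_TRUE_PROP
RESULT_FALSE_REACH = "false(reach)"   # placeholder for result.RESULT_FALSE_REACH

def classify(output):
    # mirrors the wrapper: TRUE wins, then FALSE, anything else is UNKNOWN
    if "TRUE\n" in output:
        return RESULT_TRUE_PROP
    elif "FALSE\n" in output:
        return RESULT_FALSE_REACH
    return "UNKNOWN"

assert classify("verifying...\nTRUE\n") == RESULT_TRUE_PROP
assert classify("verifying...\nFALSE\n") == RESULT_FALSE_REACH
assert classify("segfault\n") == "UNKNOWN"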
Python
|
apache-2.0
|
ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,IljaZakharov/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,martin-neuhaeusser/benchexec,martin-neuhaeusser/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec,martin-neuhaeusser/benchexec,dbeyer/benchexec,IljaZakharov/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,dbeyer/benchexec
|
Add wrapper script for the LCTD verification tool
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
<commit_before><commit_msg>Add wrapper script for the LCTD verification tool<commit_after>
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
Add wrapper script for the LCTD verification tool"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
<commit_before><commit_msg>Add wrapper script for the LCTD verification tool<commit_after>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
def executable(self):
return util.find_executable('lctdsvcomp')
def name(self):
return 'LCTD'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
assert len(tasks) == 1
return [executable] + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if "TRUE\n" in output:
status = result.RESULT_TRUE_PROP
elif "FALSE\n" in output:
status = result.RESULT_FALSE_REACH
else:
status = "UNKNOWN"
return status
|
|
30315a030944aae812c7a8f3729fc9c282f5014f
|
llsm-downscale.py
|
llsm-downscale.py
|
# IPython log file
from tqdm import tqdm
import numpy as np
import dask.array as da
import zarr
import itertools
from skimage.transform import downscale_local_mean
lls = da.from_zarr('gokul-lls/aollsm-m4-560nm.zarr')
lls3 = zarr.open(
'gokul-lls/aollsm-m4-560nm-downscale.zarr',
dtype=np.float32,
shape=(199, 201, 192, 256),
chunks=(1, 201, 192, 256),
)
indices = list(itertools.product(range(199), range(201)))
for i, j in tqdm(indices):
lls3[i, j] = downscale_local_mean(np.array(lls[i, j]), (4, 4))
|
Add IPython log for downscaling Gokul's LLS zarr dataset
|
Add IPython log for downscaling Gokul's LLS zarr dataset
|
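The loop above shrinks each 2-D slice with skimage's downscale_local_mean, which averages non-overlapping blocks (4x4 here) and zero-pads when the shape does not divide evenly. A tiny self-contained check of that behaviour on a toy array (illustrative only, not the LLS data):

import numpy as np
from skimage.transform import downscale_local_mean

# 4x4 array of 0..15; averaging 2x2 blocks must give the four block means.
a = np.arange(16, dtype=float).reshape(4, 4)
small = downscale_local_mean(a, (2, 2))
expected = np.array([[ 2.5,  4.5],
                     [10.5, 12.5]])
assert np.allclose(small, expected)
print(small)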
Python
|
bsd-3-clause
|
jni/useful-histories
|
Add IPython log for downscaling Gokul's LLS zarr dataset
|
# IPython log file
from tqdm import tqdm
import numpy as np
import dask.array as da
import zarr
import itertools
from skimage.transform import downscale_local_mean
lls = da.from_zarr('gokul-lls/aollsm-m4-560nm.zarr')
lls3 = zarr.open(
'gokul-lls/aollsm-m4-560nm-downscale.zarr',
dtype=np.float32,
shape=(199, 201, 192, 256),
chunks=(1, 201, 192, 256),
)
indices = list(itertools.product(range(199), range(201)))
for i, j in tqdm(indices):
lls3[i, j] = downscale_local_mean(np.array(lls[i, j]), (4, 4))
|
<commit_before><commit_msg>Add IPython log for downscaling Gokul's LLS zarr dataset<commit_after>
|
# IPython log file
from tqdm import tqdm
import numpy as np
import dask.array as da
import zarr
import itertools
from skimage.transform import downscale_local_mean
lls = da.from_zarr('gokul-lls/aollsm-m4-560nm.zarr')
lls3 = zarr.open(
'gokul-lls/aollsm-m4-560nm-downscale.zarr',
dtype=np.float32,
shape=(199, 201, 192, 256),
chunks=(1, 201, 192, 256),
)
indices = list(itertools.product(range(199), range(201)))
for i, j in tqdm(indices):
lls3[i, j] = downscale_local_mean(np.array(lls[i, j]), (4, 4))
|
Add IPython log for downscaling Gokul's LLS zarr dataset# IPython log file
from tqdm import tqdm
import numpy as np
import dask.array as da
import zarr
import itertools
from skimage.transform import downscale_local_mean
lls = da.from_zarr('gokul-lls/aollsm-m4-560nm.zarr')
lls3 = zarr.open(
'gokul-lls/aollsm-m4-560nm-downscale.zarr',
dtype=np.float32,
shape=(199, 201, 192, 256),
chunks=(1, 201, 192, 256),
)
indices = list(itertools.product(range(199), range(201)))
for i, j in tqdm(indices):
lls3[i, j] = downscale_local_mean(np.array(lls[i, j]), (4, 4))
|
<commit_before><commit_msg>Add IPython log for downscaling Gokul's LLS zarr dataset<commit_after># IPython log file
from tqdm import tqdm
import numpy as np
import dask.array as da
import zarr
import itertools
from skimage.transform import downscale_local_mean
lls = da.from_zarr('gokul-lls/aollsm-m4-560nm.zarr')
lls3 = zarr.open(
'gokul-lls/aollsm-m4-560nm-downscale.zarr',
dtype=np.float32,
shape=(199, 201, 192, 256),
chunks=(1, 201, 192, 256),
)
indices = list(itertools.product(range(199), range(201)))
for i, j in tqdm(indices):
lls3[i, j] = downscale_local_mean(np.array(lls[i, j]), (4, 4))
|
|
d073a0f3ea6dd7be5c61ac08e4b566601ec211e6
|
makeRegionFile.py
|
makeRegionFile.py
|
'''
A program to turn a catalog file into a ds9 region file
'''
import phot_utils as pu
import Sources as S
def makeRegionFile(filename, outname):
catalog = open(filename, "r")
tmp = filter(lambda line: pu.noHead(line), catalog)
sources = map(lambda line: S.SCAMSource(line), tmp)
out = open(outname, "w")
for source in sources:
out.write("physical;circle(" + str(source.ximg) + "," +
str(source.yimg) + ",2) #color=red" + "\n")
|
Convert a catalog of sources to ds9 region file
|
Convert a catalog of sources to ds9 region file
|
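Each catalog source becomes one ds9 region line: a circle of radius 2 pixels in 'physical' (image pixel) coordinates. A hedged standalone sketch of the output format (SCAMSource and the catalog layout are project-specific, so an invented dummy source stands in):

class DummySource(object):
    def __init__(self, ximg, yimg):
        self.ximg = ximg  # pixel coordinates, as on SCAMSource
        self.yimg = yimg

src = DummySource(512.3, 1024.7)
line = ("physical;circle(" + str(src.ximg) + "," +
        str(src.yimg) + ",2) #color=red" + "\n")
print(line)  # physical;circle(512.3,1024.7,2) #color=red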
Python
|
mit
|
SAGES-UCSC/Photometry,SAGES-UCSC/Photometry
|
Convert a catalog of sources to ds9 region file
|
'''
A program to turn a catalog file into a ds9 region file
'''
import phot_utils as pu
import Sources as S
def makeRegionFile(filename, outname):
catalog = open(filename, "r")
tmp = filter(lambda line: pu.noHead(line), catalog)
sources = map(lambda line: S.SCAMSource(line), tmp)
out = open(outname, "w")
for source in sources:
out.write("physical;circle(" + str(source.ximg) + "," +
str(source.yimg) + ",2) #color=red" + "\n")
|
<commit_before><commit_msg>Convert a catalog of sources to ds9 region file<commit_after>
|
'''
A program to turn a catalog file into a ds9 region file
'''
import phot_utils as pu
import Sources as S
def makeRegionFile(filename, outname):
catalog = open(filename, "r")
tmp = filter(lambda line: pu.noHead(line), catalog)
sources = map(lambda line: S.SCAMSource(line), tmp)
out = open(outname, "w")
for source in sources:
out.write("physical;circle(" + str(source.ximg) + "," +
str(source.yimg) + ",2) #color=red" + "\n")
|
Convert a catalog of sources to ds9 region file'''
A program to turn a catalog file into a ds9 region file
'''
import phot_utils as pu
import Sources as S
def makeRegionFile(filename, outname):
catalog = open(filename, "r")
tmp = filter(lambda line: pu.noHead(line), catalog)
sources = map(lambda line: S.SCAMSource(line), tmp)
out = open(outname, "w")
for source in sources:
out.write("physical;circle(" + str(source.ximg) + "," +
str(source.yimg) + ",2) #color=red" + "\n")
|
<commit_before><commit_msg>Convert a catalog of sources to ds9 region file<commit_after>'''
A program to turn a catalog file into a ds9 region file
'''
import phot_utils as pu
import Sources as S
def makeRegionFile(filename, outname):
catalog = open(filename, "r")
tmp = filter(lambda line: pu.noHead(line), catalog)
sources = map(lambda line: S.SCAMSource(line), tmp)
out = open(outname, "w")
for source in sources:
out.write("physical;circle(" + str(source.ximg) + "," +
str(source.yimg) + ",2) #color=red" + "\n")
|
|
6ec0b59c3f105f13503acaab691bccf3a6bf70b1
|
test/runtest/testargv.py
|
test/runtest/testargv.py
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test subdir args for runtest.py, for example:
python runtest.py test/subdir
"""
import os
import TestRuntest
test = TestRuntest.TestRuntest()
test.subdir('test', ['test', 'subdir'])
files = {}
files['pythonstring'] = TestRuntest.pythonstring
files['one'] = os.path.join('test/subdir', 'test_one.py')
files['two'] = os.path.join('test/subdir', 'two.py')
files['three'] = os.path.join('test', 'test_three.py')
test.write_passing_test(files['one'])
test.write_passing_test(files['two'])
test.write_passing_test(files['three'])
expect_stdout = """\
%(pythonstring)s -tt %(one)s
PASSING TEST STDOUT
%(pythonstring)s -tt %(two)s
PASSING TEST STDOUT
""" % files
expect_stderr = """\
PASSING TEST STDERR
PASSING TEST STDERR
"""
test.run(arguments = '--no-progress test/subdir',
status = 0,
stdout = expect_stdout,
stderr = expect_stderr)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
Add test for "runtest test/somedir" case
|
Add test for "runtest test/somedir" case
|
Python
|
mit
|
andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons
|
Add test for "runtest test/somedir" case
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test subdir args for runtest.py, for example:
python runtest.py test/subdir
"""
import os
import TestRuntest
test = TestRuntest.TestRuntest()
test.subdir('test', ['test', 'subdir'])
files = {}
files['pythonstring'] = TestRuntest.pythonstring
files['one'] = os.path.join('test/subdir', 'test_one.py')
files['two'] = os.path.join('test/subdir', 'two.py')
files['three'] = os.path.join('test', 'test_three.py')
test.write_passing_test(files['one'])
test.write_passing_test(files['two'])
test.write_passing_test(files['three'])
expect_stdout = """\
%(pythonstring)s -tt %(one)s
PASSING TEST STDOUT
%(pythonstring)s -tt %(two)s
PASSING TEST STDOUT
""" % files
expect_stderr = """\
PASSING TEST STDERR
PASSING TEST STDERR
"""
test.run(arguments = '--no-progress test/subdir',
status = 0,
stdout = expect_stdout,
stderr = expect_stderr)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
<commit_before><commit_msg>Add test for "runtest test/somedir" case<commit_after>
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test subdir args for runtest.py, for example:
python runtest.py test/subdir
"""
import os
import TestRuntest
test = TestRuntest.TestRuntest()
test.subdir('test', ['test', 'subdir'])
files = {}
files['pythonstring'] = TestRuntest.pythonstring
files['one'] = os.path.join('test/subdir', 'test_one.py')
files['two'] = os.path.join('test/subdir', 'two.py')
files['three'] = os.path.join('test', 'test_three.py')
test.write_passing_test(files['one'])
test.write_passing_test(files['two'])
test.write_passing_test(files['three'])
expect_stdout = """\
%(pythonstring)s -tt %(one)s
PASSING TEST STDOUT
%(pythonstring)s -tt %(two)s
PASSING TEST STDOUT
""" % files
expect_stderr = """\
PASSING TEST STDERR
PASSING TEST STDERR
"""
test.run(arguments = '--no-progress test/subdir',
status = 0,
stdout = expect_stdout,
stderr = expect_stderr)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
Add test for "runtest test/somedir" case#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test subdir args for runtest.py, for example:
python runtest.py test/subdir
"""
import os
import TestRuntest
test = TestRuntest.TestRuntest()
test.subdir('test', ['test', 'subdir'])
files = {}
files['pythonstring'] = TestRuntest.pythonstring
files['one'] = os.path.join('test/subdir', 'test_one.py')
files['two'] = os.path.join('test/subdir', 'two.py')
files['three'] = os.path.join('test', 'test_three.py')
test.write_passing_test(files['one'])
test.write_passing_test(files['two'])
test.write_passing_test(files['three'])
expect_stdout = """\
%(pythonstring)s -tt %(one)s
PASSING TEST STDOUT
%(pythonstring)s -tt %(two)s
PASSING TEST STDOUT
""" % files
expect_stderr = """\
PASSING TEST STDERR
PASSING TEST STDERR
"""
test.run(arguments = '--no-progress test/subdir',
status = 0,
stdout = expect_stdout,
stderr = expect_stderr)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
<commit_before><commit_msg>Add test for "runtest test/somedir" case<commit_after>#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test subdir args for runtest.py, for example:
python runtest.py test/subdir
"""
import os
import TestRuntest
test = TestRuntest.TestRuntest()
test.subdir('test', ['test', 'subdir'])
files = {}
files['pythonstring'] = TestRuntest.pythonstring
files['one'] = os.path.join('test/subdir', 'test_one.py')
files['two'] = os.path.join('test/subdir', 'two.py')
files['three'] = os.path.join('test', 'test_three.py')
test.write_passing_test(files['one'])
test.write_passing_test(files['two'])
test.write_passing_test(files['three'])
expect_stdout = """\
%(pythonstring)s -tt %(one)s
PASSING TEST STDOUT
%(pythonstring)s -tt %(two)s
PASSING TEST STDOUT
""" % files
expect_stderr = """\
PASSING TEST STDERR
PASSING TEST STDERR
"""
test.run(arguments = '--no-progress test/subdir',
status = 0,
stdout = expect_stdout,
stderr = expect_stderr)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
|
368360640aa31f93cf92290baf313ede0b212584
|
toon/input/keyboard2.py
|
toon/input/keyboard2.py
|
import numpy as np
from toon.input.base_input import BaseInput, DummyTime
class Keyboard(BaseInput):
def __init__(self,
clock_source=DummyTime,
keys=None,
multiprocess=False,
nrow=10):
if keys is None:
raise ValueError('`keys` must be a list of keys of interest.')
BaseInput.__init__(self, clock_source, multiprocess, (nrow, len(keys)))
self._lenkeys = len(keys)
self._keys = keys
self._buffer = np.full(self._lenkeys, 0)
def _init_device(self):
import keyboard
self._device = keyboard
self._buffer[:] = 0
n = 0
for key in self._keys:
keyboard.add_hotkey(key, self._add_array, (n,), timeout=0.001)
keyboard.add_hotkey(key, self._rem_array, (n,), timeout = 0.001, trigger_on_release=True)
n += 1
def _read(self):
timestamp = self.time.getTime()
return self._buffer, timestamp
def _stop_device(self):
self._device.clear_all_hotkeys()
def _close_device(self):
pass
def _add_array(self, index):
self._buffer[index] = 1
def _rem_array(self, index):
self._buffer[index] = 0
|
Add Windows keyboard (works, but needs rate-limiting)
|
Add Windows keyboard (works, but needs rate-limiting)
|
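The class above keeps a 0/1 state vector in sync with physical key state by registering two hotkeys per key: one callback on press and one on release. A minimal sketch of that pattern against the third-party `keyboard` package (illustrative: the key list is arbitrary, and the package needs root on Linux):

import numpy as np
import keyboard  # same third-party package the module imports lazily

keys = ['a', 's']
state = np.zeros(len(keys), dtype=int)

def press(i):
    state[i] = 1

def release(i):
    state[i] = 0

for n, key in enumerate(keys):
    keyboard.add_hotkey(key, press, args=(n,), timeout=0.001)
    keyboard.add_hotkey(key, release, args=(n,), timeout=0.001,
                        trigger_on_release=True)

# poll `state` from an acquisition loop; while 'a' is held, state == [1, 0]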
Python
|
mit
|
aforren1/toon
|
Add windows keyboard (works, but needs rate-limiting
|
import numpy as np
from toon.input.base_input import BaseInput, DummyTime
class Keyboard(BaseInput):
def __init__(self,
clock_source=DummyTime,
keys=None,
multiprocess=False,
nrow=10):
if keys is None:
raise ValueError('`keys` must be a list of keys of interest.')
BaseInput.__init__(self, clock_source, multiprocess, (nrow, len(keys)))
self._lenkeys = len(keys)
self._keys = keys
self._buffer = np.full(self._lenkeys, 0)
def _init_device(self):
import keyboard
self._device = keyboard
self._buffer[:] = 0
n = 0
for key in self._keys:
keyboard.add_hotkey(key, self._add_array, (n,), timeout=0.001)
keyboard.add_hotkey(key, self._rem_array, (n,), timeout = 0.001, trigger_on_release=True)
n += 1
def _read(self):
timestamp = self.time.getTime()
return self._buffer, timestamp
def _stop_device(self):
self._device.clear_all_hotkeys()
def _close_device(self):
pass
def _add_array(self, index):
self._buffer[index] = 1
def _rem_array(self, index):
self._buffer[index] = 0
|
<commit_before><commit_msg>Add Windows keyboard (works, but needs rate-limiting)<commit_after>
|
import numpy as np
from toon.input.base_input import BaseInput, DummyTime
class Keyboard(BaseInput):
def __init__(self,
clock_source=DummyTime,
keys=None,
multiprocess=False,
nrow=10):
if keys is None:
raise ValueError('`keys` must be a list of keys of interest.')
BaseInput.__init__(self, clock_source, multiprocess, (nrow, len(keys)))
self._lenkeys = len(keys)
self._keys = keys
self._buffer = np.full(self._lenkeys, 0)
def _init_device(self):
import keyboard
self._device = keyboard
self._buffer[:] = 0
n = 0
for key in self._keys:
keyboard.add_hotkey(key, self._add_array, (n,), timeout=0.001)
keyboard.add_hotkey(key, self._rem_array, (n,), timeout = 0.001, trigger_on_release=True)
n += 1
def _read(self):
timestamp = self.time.getTime()
return self._buffer, timestamp
def _stop_device(self):
self._device.clear_all_hotkeys()
def _close_device(self):
pass
def _add_array(self, index):
self._buffer[index] = 1
def _rem_array(self, index):
self._buffer[index] = 0
|
Add Windows keyboard (works, but needs rate-limiting)import numpy as np
from toon.input.base_input import BaseInput, DummyTime
class Keyboard(BaseInput):
def __init__(self,
clock_source=DummyTime,
keys=None,
multiprocess=False,
nrow=10):
if keys is None:
raise ValueError('`keys` must be a list of keys of interest.')
BaseInput.__init__(self, clock_source, multiprocess, (nrow, len(keys)))
self._lenkeys = len(keys)
self._keys = keys
self._buffer = np.full(self._lenkeys, 0)
def _init_device(self):
import keyboard
self._device = keyboard
self._buffer[:] = 0
n = 0
for key in self._keys:
keyboard.add_hotkey(key, self._add_array, (n,), timeout=0.001)
keyboard.add_hotkey(key, self._rem_array, (n,), timeout = 0.001, trigger_on_release=True)
n += 1
def _read(self):
timestamp = self.time.getTime()
return self._buffer, timestamp
def _stop_device(self):
self._device.clear_all_hotkeys()
def _close_device(self):
pass
def _add_array(self, index):
self._buffer[index] = 1
def _rem_array(self, index):
self._buffer[index] = 0
|
<commit_before><commit_msg>Add Windows keyboard (works, but needs rate-limiting)<commit_after>import numpy as np
from toon.input.base_input import BaseInput, DummyTime
class Keyboard(BaseInput):
def __init__(self,
clock_source=DummyTime,
keys=None,
multiprocess=False,
nrow=10):
if keys is None:
raise ValueError('`keys` must be a list of keys of interest.')
BaseInput.__init__(self, clock_source, multiprocess, (nrow, len(keys)))
self._lenkeys = len(keys)
self._keys = keys
self._buffer = np.full(self._lenkeys, 0)
def _init_device(self):
import keyboard
self._device = keyboard
self._buffer[:] = 0
n = 0
for key in self._keys:
keyboard.add_hotkey(key, self._add_array, (n,), timeout=0.001)
keyboard.add_hotkey(key, self._rem_array, (n,), timeout = 0.001, trigger_on_release=True)
n += 1
def _read(self):
timestamp = self.time.getTime()
return self._buffer, timestamp
def _stop_device(self):
self._device.clear_all_hotkeys()
def _close_device(self):
pass
def _add_array(self, index):
self._buffer[index] = 1
def _rem_array(self, index):
self._buffer[index] = 0
|
|
1e385a0d1ef396249ac626e242ae6c1140d8b7a0
|
tests/unit/test_files.py
|
tests/unit/test_files.py
|
from tests import PMGTestCase
import datetime
import pytz
from pmg.models import db, CommitteeMeeting, Event, EventFile, File, House, Committee
from tests.fixtures import dbfixture, CommitteeData, CommitteeMeetingData, EventData
# TODO: might have to mock S3
class TestFiles(PMGTestCase):
def setUp(self):
super().setUp()
self.house = House(name="National Assembly", name_short="NA", sphere="national")
self.committee = Committee(
name="Communications", house=self.house, premium=True
)
self.committee_meeting = CommitteeMeeting(
date=datetime.datetime(2019, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
title="Public meeting One",
committee=self.committee,
)
self.file = File(file_path="test-path.pdf")
self.event_file = EventFile(event=self.committee_meeting, file=self.file)
db.session.add(self.house)
db.session.add(self.committee)
db.session.add(self.committee_meeting)
db.session.add(self.event_file)
db.session.commit()
self.file_id = self.file.id
def test_delete_file_when_linked_to_meeting(self):
# When we delete the file, the event should be deleted too, but
# the meeting shouldn't be deleted
db.session.delete(self.file)
db.session.commit()
event_file = EventFile.query.filter_by(file_id=self.file_id).first()
self.assertIsNone(event_file)
committee_meeting = CommitteeMeeting.query.filter_by(
id=self.committee_meeting.id
).first()
self.assertIsNotNone(committee_meeting)
|
Add test for file deletion
|
Add test for file deletion
|
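The behaviour the test pins down (deleting a File removes its EventFile link rows but leaves the event itself) is what a delete cascade configured on the file-to-link relationship, and not on the link-to-event side, produces. A hedged toy sketch of such a mapping (invented model names; pmg's actual models may configure this differently, e.g. with database-level ON DELETE CASCADE):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class Event(Base):
    __tablename__ = "event"
    id = Column(Integer, primary_key=True)

class File(Base):
    __tablename__ = "file"
    id = Column(Integer, primary_key=True)
    # deleting a File also deletes its link rows...
    event_files = relationship("EventFile", cascade="all, delete-orphan")

class EventFile(Base):
    __tablename__ = "event_file"
    id = Column(Integer, primary_key=True)
    file_id = Column(Integer, ForeignKey("file.id"))
    event_id = Column(Integer, ForeignKey("event.id"))
    event = relationship(Event)  # ...but the Event itself survives

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as s:
    s.add(File(event_files=[EventFile(event=Event())]))
    s.commit()
    s.delete(s.query(File).one())
    s.commit()
    assert s.query(EventFile).count() == 0
    assert s.query(Event).count() == 1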
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
Add test for file deletion
|
from tests import PMGTestCase
import datetime
import pytz
from pmg.models import db, CommitteeMeeting, Event, EventFile, File, House, Committee
from tests.fixtures import dbfixture, CommitteeData, CommitteeMeetingData, EventData
# TODO: might have to mock S3
class TestFiles(PMGTestCase):
def setUp(self):
super().setUp()
self.house = House(name="National Assembly", name_short="NA", sphere="national")
self.committee = Committee(
name="Communications", house=self.house, premium=True
)
self.committee_meeting = CommitteeMeeting(
date=datetime.datetime(2019, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
title="Public meeting One",
committee=self.committee,
)
self.file = File(file_path="test-path.pdf")
self.event_file = EventFile(event=self.committee_meeting, file=self.file)
db.session.add(self.house)
db.session.add(self.committee)
db.session.add(self.committee_meeting)
db.session.add(self.event_file)
db.session.commit()
self.file_id = self.file.id
def test_delete_file_when_linked_to_meeting(self):
# When we delete the file, the event should be deleted too, but
# the meeting shouldn't be deleted
db.session.delete(self.file)
db.session.commit()
event_file = EventFile.query.filter_by(file_id=self.file_id).first()
self.assertIsNone(event_file)
committee_meeting = CommitteeMeeting.query.filter_by(
id=self.committee_meeting.id
).first()
self.assertIsNotNone(committee_meeting)
|
<commit_before><commit_msg>Add test for file deletion<commit_after>
|
from tests import PMGTestCase
import datetime
import pytz
from pmg.models import db, CommitteeMeeting, Event, EventFile, File, House, Committee
from tests.fixtures import dbfixture, CommitteeData, CommitteeMeetingData, EventData
# TODO: might have to mock S3
class TestFiles(PMGTestCase):
def setUp(self):
super().setUp()
self.house = House(name="National Assembly", name_short="NA", sphere="national")
self.committee = Committee(
name="Communications", house=self.house, premium=True
)
self.committee_meeting = CommitteeMeeting(
date=datetime.datetime(2019, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
title="Public meeting One",
committee=self.committee,
)
self.file = File(file_path="test-path.pdf")
self.event_file = EventFile(event=self.committee_meeting, file=self.file)
db.session.add(self.house)
db.session.add(self.committee)
db.session.add(self.committee_meeting)
db.session.add(self.event_file)
db.session.commit()
self.file_id = self.file.id
def test_delete_file_when_linked_to_meeting(self):
# When we delete the file, the event should be deleted too, but
# the meeting shouldn't be deleted
db.session.delete(self.file)
db.session.commit()
event_file = EventFile.query.filter_by(file_id=self.file_id).first()
self.assertIsNone(event_file)
committee_meeting = CommitteeMeeting.query.filter_by(
id=self.committee_meeting.id
).first()
self.assertIsNotNone(committee_meeting)
|
Add test for file deletionfrom tests import PMGTestCase
import datetime
import pytz
from pmg.models import db, CommitteeMeeting, Event, EventFile, File, House, Committee
from tests.fixtures import dbfixture, CommitteeData, CommitteeMeetingData, EventData
# TODO: might have to mock S3
class TestFiles(PMGTestCase):
def setUp(self):
super().setUp()
self.house = House(name="National Assembly", name_short="NA", sphere="national")
self.committee = Committee(
name="Communications", house=self.house, premium=True
)
self.committee_meeting = CommitteeMeeting(
date=datetime.datetime(2019, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
title="Public meeting One",
committee=self.committee,
)
self.file = File(file_path="test-path.pdf")
self.event_file = EventFile(event=self.committee_meeting, file=self.file)
db.session.add(self.house)
db.session.add(self.committee)
db.session.add(self.committee_meeting)
db.session.add(self.event_file)
db.session.commit()
self.file_id = self.file.id
def test_delete_file_when_linked_to_meeting(self):
# When we delete the file, the event should be deleted too, but
# the meeting shouldn't be deleted
db.session.delete(self.file)
db.session.commit()
event_file = EventFile.query.filter_by(file_id=self.file_id).first()
self.assertIsNone(event_file)
committee_meeting = CommitteeMeeting.query.filter_by(
id=self.committee_meeting.id
).first()
self.assertIsNotNone(committee_meeting)
|
<commit_before><commit_msg>Add test for file deletion<commit_after>from tests import PMGTestCase
import datetime
import pytz
from pmg.models import db, CommitteeMeeting, Event, EventFile, File, House, Committee
from tests.fixtures import dbfixture, CommitteeData, CommitteeMeetingData, EventData
# TODO: might have to mock S3
class TestFiles(PMGTestCase):
def setUp(self):
super().setUp()
self.house = House(name="National Assembly", name_short="NA", sphere="national")
self.committee = Committee(
name="Communications", house=self.house, premium=True
)
self.committee_meeting = CommitteeMeeting(
date=datetime.datetime(2019, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
title="Public meeting One",
committee=self.committee,
)
self.file = File(file_path="test-path.pdf")
self.event_file = EventFile(event=self.committee_meeting, file=self.file)
db.session.add(self.house)
db.session.add(self.committee)
db.session.add(self.committee_meeting)
db.session.add(self.event_file)
db.session.commit()
self.file_id = self.file.id
def test_delete_file_when_linked_to_meeting(self):
# When we delete the file, the event should be deleted too, but
# the meeting shouldn't be deleted
db.session.delete(self.file)
db.session.commit()
event_file = EventFile.query.filter_by(file_id=self.file_id).first()
self.assertIsNone(event_file)
committee_meeting = CommitteeMeeting.query.filter_by(
id=self.committee_meeting.id
).first()
self.assertIsNotNone(committee_meeting)
|
|
e377be96d4d928c83ad91b5fad9a3c465ecf6b1b
|
zerver/management/commands/realm_filters.py
|
zerver/management/commands/realm_filters.py
|
from __future__ import absolute_import
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, all_realm_filters, Realm
import sys
class Command(BaseCommand):
help = """Create a realm for the specified domain.
Usage: python manage.py realm_filters --realm=foo.com --op=<add|remove|show> [PATTERN] [URL_FORMAT_STRING]
Example: python manage.py realm_filters --realm=zulip.com --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: python manage.py realm_filters --realm=zulip.com --op=remove '#(?P<id>[0-9]{2,8})'
Example: python manage.py realm_filters --realm=zulip.com --op=show
"""
option_list = BaseCommand.option_list + (
make_option('-r', '--realm',
dest='domain',
type='str',
help='The name of the realm to adjust filters for.'),
make_option('--op',
dest='op',
type='str',
default="show",
help='What operation to do (add, show, remove).'),
)
def handle(self, *args, **options):
if "domain" not in options:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
realm = Realm.objects.get(domain=options["domain"])
if options["op"] == "show":
print "%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, ""))
sys.exit(0)
if not args:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
pattern = args[0]
if options["op"] == "add":
url_format_string = args[1]
RealmFilter(realm=realm, pattern=pattern,
url_format_string=url_format_string).save()
sys.exit(0)
elif options["op"] == "remove":
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
sys.exit(0)
else:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
|
Add a management command to set realm filters.
|
Add a management command to set realm filters.
(imported from commit 6590d7eb58ea6e6afc15104dd75ad56b832858fa)
|
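At render time a stored (pattern, url_format_string) pair is applied by substituting a regex match's named groups into the URL with %-formatting, as the examples in the help text suggest. A toy illustration of that expansion using the pattern from the example (standalone sketch, not Zulip's actual rendering code):

import re

pattern = r'#(?P<id>[0-9]{2,8})'
url_format_string = 'https://trac.humbughq.com/ticket/%(id)s'

def expand(text):
    # replace each match with the URL built from its named groups
    return re.sub(pattern,
                  lambda m: url_format_string % m.groupdict(),
                  text)

print(expand("see #1234 for details"))
# see https://trac.humbughq.com/ticket/1234 for details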
Python
|
apache-2.0
|
Jianchun1/zulip,easyfmxu/zulip,m1ssou/zulip,moria/zulip,LeeRisk/zulip,eastlhu/zulip,kou/zulip,RobotCaleb/zulip,bluesea/zulip,atomic-labs/zulip,xuanhan863/zulip,Qgap/zulip,willingc/zulip,natanovia/zulip,voidException/zulip,stamhe/zulip,blaze225/zulip,Qgap/zulip,Vallher/zulip,mohsenSy/zulip,grave-w-grave/zulip,proliming/zulip,qq1012803704/zulip,SmartPeople/zulip,zwily/zulip,adnanh/zulip,rishig/zulip,blaze225/zulip,zacps/zulip,sup95/zulip,codeKonami/zulip,luyifan/zulip,easyfmxu/zulip,LeeRisk/zulip,tommyip/zulip,sonali0901/zulip,samatdav/zulip,thomasboyt/zulip,jphilipsen05/zulip,wavelets/zulip,jimmy54/zulip,babbage/zulip,kou/zulip,gkotian/zulip,sup95/zulip,hengqujushi/zulip,dotcool/zulip,jrowan/zulip,bluesea/zulip,j831/zulip,kaiyuanheshang/zulip,zachallaun/zulip,alliejones/zulip,saitodisse/zulip,ashwinirudrappa/zulip,atomic-labs/zulip,niftynei/zulip,MayB/zulip,amyliu345/zulip,glovebx/zulip,codeKonami/zulip,amallia/zulip,zachallaun/zulip,jimmy54/zulip,hayderimran7/zulip,wdaher/zulip,hustlzp/zulip,Frouk/zulip,arpith/zulip,esander91/zulip,jimmy54/zulip,hustlzp/zulip,peguin40/zulip,noroot/zulip,LAndreas/zulip,rht/zulip,arpitpanwar/zulip,ashwinirudrappa/zulip,schatt/zulip,fw1121/zulip,Galexrt/zulip,christi3k/zulip,luyifan/zulip,SmartPeople/zulip,aliceriot/zulip,xuanhan863/zulip,firstblade/zulip,voidException/zulip,kaiyuanheshang/zulip,PaulPetring/zulip,littledogboy/zulip,he15his/zulip,aakash-cr7/zulip,peiwei/zulip,TigorC/zulip,ericzhou2008/zulip,Drooids/zulip,jeffcao/zulip,timabbott/zulip,synicalsyntax/zulip,dhcrzf/zulip,Juanvulcano/zulip,brockwhittaker/zulip,arpitpanwar/zulip,armooo/zulip,hafeez3000/zulip,babbage/zulip,natanovia/zulip,synicalsyntax/zulip,shrikrishnaholla/zulip,qq1012803704/zulip,dxq-git/zulip,bluesea/zulip,sonali0901/zulip,aps-sids/zulip,Juanvulcano/zulip,vabs22/zulip,aps-sids/zulip,zhaoweigg/zulip,noroot/zulip,dattatreya303/zulip,codeKonami/zulip,gkotian/zulip,aps-sids/zulip,jessedhillon/zulip,deer-hope/zulip,natanovia/zulip,bowlofstew/zulip,zhaoweigg/zulip,johnny9/zulip,TigorC/zulip,dhcrzf/zulip,vabs22/zulip,ufosky-server/zulip,MayB/zulip,amanharitsh123/zulip,nicholasbs/zulip,tommyip/zulip,levixie/zulip,arpith/zulip,tdr130/zulip,jerryge/zulip,susansls/zulip,hafeez3000/zulip,PhilSk/zulip,aakash-cr7/zulip,dxq-git/zulip,Cheppers/zulip,ashwinirudrappa/zulip,ApsOps/zulip,yuvipanda/zulip,Diptanshu8/zulip,jphilipsen05/zulip,karamcnair/zulip,mansilladev/zulip,souravbadami/zulip,lfranchi/zulip,brainwane/zulip,firstblade/zulip,JanzTam/zulip,mdavid/zulip,eastlhu/zulip,saitodisse/zulip,dwrpayne/zulip,zachallaun/zulip,johnny9/zulip,deer-hope/zulip,SmartPeople/zulip,hengqujushi/zulip,stamhe/zulip,hj3938/zulip,shrikrishnaholla/zulip,adnanh/zulip,sup95/zulip,susansls/zulip,so0k/zulip,Qgap/zulip,dxq-git/zulip,paxapy/zulip,dotcool/zulip,joyhchen/zulip,timabbott/zulip,vakila/zulip,dotcool/zulip,wangdeshui/zulip,voidException/zulip,m1ssou/zulip,Juanvulcano/zulip,aliceriot/zulip,shubhamdhama/zulip,thomasboyt/zulip,Drooids/zulip,atomic-labs/zulip,shaunstanislaus/zulip,bssrdf/zulip,karamcnair/zulip,joshisa/zulip,calvinleenyc/zulip,LeeRisk/zulip,vaidap/zulip,sharmaeklavya2/zulip,souravbadami/zulip,kou/zulip,niftynei/zulip,KJin99/zulip,brainwane/zulip,arpith/zulip,Batterfii/zulip,hj3938/zulip,babbage/zulip,showell/zulip,nicholasbs/zulip,jackrzhang/zulip,ipernet/zulip,pradiptad/zulip,technicalpickles/zulip,timabbott/zulip,ufosky-server/zulip,KingxBanana/zulip,ahmadassaf/zulip,ashwinirudrappa/zulip,joshisa/zulip,developerfm/zulip,udxxabp/zulip,natanovia/zulip,rishig/zulip,brainwane/zulip,jerryge/zulip,john
nygaddarr/zulip,zacps/zulip,udxxabp/zulip,MayB/zulip,jonesgithub/zulip,zwily/zulip,tbutter/zulip,easyfmxu/zulip,tbutter/zulip,thomasboyt/zulip,sonali0901/zulip,niftynei/zulip,ashwinirudrappa/zulip,shaunstanislaus/zulip,armooo/zulip,Diptanshu8/zulip,mdavid/zulip,swinghu/zulip,hafeez3000/zulip,sup95/zulip,Vallher/zulip,souravbadami/zulip,Suninus/zulip,voidException/zulip,umkay/zulip,developerfm/zulip,mahim97/zulip,suxinde2009/zulip,joyhchen/zulip,shubhamdhama/zulip,RobotCaleb/zulip,arpitpanwar/zulip,zhaoweigg/zulip,joyhchen/zulip,LeeRisk/zulip,aakash-cr7/zulip,wweiradio/zulip,dattatreya303/zulip,eeshangarg/zulip,vakila/zulip,wangdeshui/zulip,guiquanz/zulip,shaunstanislaus/zulip,arpith/zulip,JanzTam/zulip,bastianh/zulip,luyifan/zulip,bluesea/zulip,vakila/zulip,wdaher/zulip,guiquanz/zulip,Galexrt/zulip,wavelets/zulip,willingc/zulip,tbutter/zulip,m1ssou/zulip,m1ssou/zulip,Gabriel0402/zulip,eastlhu/zulip,showell/zulip,bastianh/zulip,aps-sids/zulip,joshisa/zulip,showell/zulip,so0k/zulip,dhcrzf/zulip,dwrpayne/zulip,andersk/zulip,karamcnair/zulip,vakila/zulip,amallia/zulip,zhaoweigg/zulip,TigorC/zulip,shubhamdhama/zulip,yuvipanda/zulip,kokoar/zulip,zorojean/zulip,sharmaeklavya2/zulip,Gabriel0402/zulip,voidException/zulip,shaunstanislaus/zulip,jeffcao/zulip,jphilipsen05/zulip,zachallaun/zulip,xuxiao/zulip,zacps/zulip,KingxBanana/zulip,kou/zulip,verma-varsha/zulip,rht/zulip,punchagan/zulip,j831/zulip,so0k/zulip,umkay/zulip,suxinde2009/zulip,eeshangarg/zulip,deer-hope/zulip,amyliu345/zulip,nicholasbs/zulip,cosmicAsymmetry/zulip,mahim97/zulip,eeshangarg/zulip,stamhe/zulip,amyliu345/zulip,kaiyuanheshang/zulip,vikas-parashar/zulip,suxinde2009/zulip,zorojean/zulip,tbutter/zulip,SmartPeople/zulip,levixie/zulip,ipernet/zulip,jainayush975/zulip,jerryge/zulip,brainwane/zulip,ryansnowboarder/zulip,bssrdf/zulip,technicalpickles/zulip,amallia/zulip,amyliu345/zulip,bowlofstew/zulip,jessedhillon/zulip,zacps/zulip,seapasulli/zulip,avastu/zulip,dxq-git/zulip,karamcnair/zulip,krtkmj/zulip,blaze225/zulip,praveenaki/zulip,xuxiao/zulip,ahmadassaf/zulip,blaze225/zulip,seapasulli/zulip,niftynei/zulip,so0k/zulip,gigawhitlocks/zulip,adnanh/zulip,wavelets/zulip,bastianh/zulip,technicalpickles/zulip,RobotCaleb/zulip,ericzhou2008/zulip,tommyip/zulip,PaulPetring/zulip,zachallaun/zulip,kou/zulip,souravbadami/zulip,synicalsyntax/zulip,ryansnowboarder/zulip,jackrzhang/zulip,ryansnowboarder/zulip,EasonYi/zulip,xuxiao/zulip,Suninus/zulip,MayB/zulip,Cheppers/zulip,vaidap/zulip,KingxBanana/zulip,samatdav/zulip,Qgap/zulip,wweiradio/zulip,wdaher/zulip,vikas-parashar/zulip,niftynei/zulip,JPJPJPOPOP/zulip,zofuthan/zulip,grave-w-grave/zulip,ryansnowboarder/zulip,schatt/zulip,reyha/zulip,hafeez3000/zulip,xuanhan863/zulip,sup95/zulip,akuseru/zulip,dhcrzf/zulip,atomic-labs/zulip,KJin99/zulip,amyliu345/zulip,ufosky-server/zulip,huangkebo/zulip,hj3938/zulip,tbutter/zulip,stamhe/zulip,eastlhu/zulip,eastlhu/zulip,grave-w-grave/zulip,cosmicAsymmetry/zulip,saitodisse/zulip,Gabriel0402/zulip,eeshangarg/zulip,dnmfarrell/zulip,gigawhitlocks/zulip,zhaoweigg/zulip,hackerkid/zulip,xuanhan863/zulip,jainayush975/zulip,JPJPJPOPOP/zulip,yuvipanda/zulip,xuanhan863/zulip,xuxiao/zulip,dwrpayne/zulip,Jianchun1/zulip,Gabriel0402/zulip,LAndreas/zulip,amanharitsh123/zulip,peguin40/zulip,eeshangarg/zulip,tiansiyuan/zulip,nicholasbs/zulip,atomic-labs/zulip,zofuthan/zulip,shubhamdhama/zulip,johnny9/zulip,peiwei/zulip,lfranchi/zulip,shrikrishnaholla/zulip,hackerkid/zulip,armooo/zulip,paxapy/zulip,saitodisse/zulip,JanzTam/zulip,ikasumiwt/zulip,voidException/zulip,pradipt
ad/zulip,jonesgithub/zulip,glovebx/zulip,bowlofstew/zulip,jrowan/zulip,andersk/zulip,punchagan/zulip,dwrpayne/zulip,johnnygaddarr/zulip,reyha/zulip,mansilladev/zulip,Suninus/zulip,isht3/zulip,littledogboy/zulip,arpith/zulip,swinghu/zulip,ashwinirudrappa/zulip,xuxiao/zulip,mahim97/zulip,samatdav/zulip,glovebx/zulip,aliceriot/zulip,jonesgithub/zulip,AZtheAsian/zulip,ikasumiwt/zulip,vabs22/zulip,calvinleenyc/zulip,peiwei/zulip,showell/zulip,wweiradio/zulip,bluesea/zulip,aps-sids/zulip,swinghu/zulip,zwily/zulip,tiansiyuan/zulip,reyha/zulip,udxxabp/zulip,Vallher/zulip,vaidap/zulip,synicalsyntax/zulip,akuseru/zulip,wangdeshui/zulip,yuvipanda/zulip,hackerkid/zulip,huangkebo/zulip,synicalsyntax/zulip,ahmadassaf/zulip,jimmy54/zulip,eastlhu/zulip,atomic-labs/zulip,rht/zulip,littledogboy/zulip,jainayush975/zulip,Frouk/zulip,KJin99/zulip,zofuthan/zulip,wavelets/zulip,bluesea/zulip,avastu/zulip,KJin99/zulip,PhilSk/zulip,jonesgithub/zulip,littledogboy/zulip,wweiradio/zulip,ipernet/zulip,easyfmxu/zulip,Batterfii/zulip,zulip/zulip,ApsOps/zulip,kaiyuanheshang/zulip,he15his/zulip,jessedhillon/zulip,calvinleenyc/zulip,paxapy/zulip,JanzTam/zulip,udxxabp/zulip,itnihao/zulip,hustlzp/zulip,krtkmj/zulip,zofuthan/zulip,tdr130/zulip,mansilladev/zulip,yuvipanda/zulip,johnny9/zulip,stamhe/zulip,qq1012803704/zulip,firstblade/zulip,xuxiao/zulip,DazWorrall/zulip,mohsenSy/zulip,zacps/zulip,souravbadami/zulip,esander91/zulip,Batterfii/zulip,bitemyapp/zulip,arpitpanwar/zulip,wavelets/zulip,dattatreya303/zulip,j831/zulip,dattatreya303/zulip,gigawhitlocks/zulip,MariaFaBella85/zulip,punchagan/zulip,tiansiyuan/zulip,shaunstanislaus/zulip,arpith/zulip,rishig/zulip,bowlofstew/zulip,developerfm/zulip,zhaoweigg/zulip,pradiptad/zulip,huangkebo/zulip,LAndreas/zulip,jrowan/zulip,praveenaki/zulip,reyha/zulip,moria/zulip,zacps/zulip,wweiradio/zulip,cosmicAsymmetry/zulip,punchagan/zulip,wangdeshui/zulip,gkotian/zulip,nicholasbs/zulip,jimmy54/zulip,paxapy/zulip,amyliu345/zulip,guiquanz/zulip,johnnygaddarr/zulip,JPJPJPOPOP/zulip,zulip/zulip,themass/zulip,punchagan/zulip,blaze225/zulip,TigorC/zulip,zofuthan/zulip,zorojean/zulip,praveenaki/zulip,grave-w-grave/zulip,fw1121/zulip,Suninus/zulip,dnmfarrell/zulip,vakila/zulip,luyifan/zulip,hayderimran7/zulip,Frouk/zulip,zhaoweigg/zulip,codeKonami/zulip,EasonYi/zulip,itnihao/zulip,lfranchi/zulip,mansilladev/zulip,ikasumiwt/zulip,bastianh/zulip,ikasumiwt/zulip,yocome/zulip,akuseru/zulip,hustlzp/zulip,krtkmj/zulip,glovebx/zulip,bowlofstew/zulip,joyhchen/zulip,showell/zulip,lfranchi/zulip,johnny9/zulip,hustlzp/zulip,LeeRisk/zulip,littledogboy/zulip,easyfmxu/zulip,levixie/zulip,seapasulli/zulip,wweiradio/zulip,ApsOps/zulip,arpitpanwar/zulip,jessedhillon/zulip,vabs22/zulip,ryansnowboarder/zulip,developerfm/zulip,krtkmj/zulip,kou/zulip,tdr130/zulip,JanzTam/zulip,wangdeshui/zulip,proliming/zulip,Batterfii/zulip,shrikrishnaholla/zulip,vikas-parashar/zulip,vabs22/zulip,firstblade/zulip,niftynei/zulip,bluesea/zulip,glovebx/zulip,ericzhou2008/zulip,mdavid/zulip,zulip/zulip,adnanh/zulip,amanharitsh123/zulip,mohsenSy/zulip,stamhe/zulip,susansls/zulip,aps-sids/zulip,mahim97/zulip,esander91/zulip,eeshangarg/zulip,qq1012803704/zulip,peguin40/zulip,Drooids/zulip,aakash-cr7/zulip,zorojean/zulip,dawran6/zulip,punchagan/zulip,rht/zulip,vaidap/zulip,peguin40/zulip,shrikrishnaholla/zulip,cosmicAsymmetry/zulip,rht/zulip,aliceriot/zulip,dnmfarrell/zulip,eeshangarg/zulip,christi3k/zulip,peiwei/zulip,schatt/zulip,yocome/zulip,tiansiyuan/zulip,amallia/zulip,qq1012803704/zulip,ryanbackman/zulip,Juanvulcano/zulip,technicalpic
kles/zulip,huangkebo/zulip,isht3/zulip,themass/zulip,ahmadassaf/zulip,christi3k/zulip,levixie/zulip,yocome/zulip,developerfm/zulip,yocome/zulip,blaze225/zulip,zachallaun/zulip,susansls/zulip,grave-w-grave/zulip,jphilipsen05/zulip,grave-w-grave/zulip,moria/zulip,j831/zulip,hj3938/zulip,kokoar/zulip,udxxabp/zulip,Gabriel0402/zulip,mdavid/zulip,KingxBanana/zulip,dattatreya303/zulip,vikas-parashar/zulip,zorojean/zulip,verma-varsha/zulip,ahmadassaf/zulip,willingc/zulip,jackrzhang/zulip,technicalpickles/zulip,j831/zulip,Drooids/zulip,dwrpayne/zulip,praveenaki/zulip,rht/zulip,firstblade/zulip,thomasboyt/zulip,EasonYi/zulip,tommyip/zulip,itnihao/zulip,ufosky-server/zulip,mdavid/zulip,samatdav/zulip,christi3k/zulip,zorojean/zulip,shubhamdhama/zulip,jackrzhang/zulip,MariaFaBella85/zulip,peiwei/zulip,firstblade/zulip,jphilipsen05/zulip,AZtheAsian/zulip,eastlhu/zulip,ahmadassaf/zulip,fw1121/zulip,Juanvulcano/zulip,PaulPetring/zulip,lfranchi/zulip,Batterfii/zulip,Vallher/zulip,proliming/zulip,PhilSk/zulip,tdr130/zulip,dotcool/zulip,Jianchun1/zulip,praveenaki/zulip,peiwei/zulip,Frouk/zulip,calvinleenyc/zulip,punchagan/zulip,pradiptad/zulip,ryanbackman/zulip,jonesgithub/zulip,KingxBanana/zulip,akuseru/zulip,dnmfarrell/zulip,dotcool/zulip,sharmaeklavya2/zulip,jrowan/zulip,jerryge/zulip,ryanbackman/zulip,levixie/zulip,he15his/zulip,hengqujushi/zulip,ericzhou2008/zulip,wangdeshui/zulip,sharmaeklavya2/zulip,bastianh/zulip,EasonYi/zulip,willingc/zulip,jackrzhang/zulip,Cheppers/zulip,jessedhillon/zulip,RobotCaleb/zulip,jainayush975/zulip,amallia/zulip,JPJPJPOPOP/zulip,schatt/zulip,deer-hope/zulip,andersk/zulip,alliejones/zulip,mohsenSy/zulip,DazWorrall/zulip,dotcool/zulip,Galexrt/zulip,proliming/zulip,brainwane/zulip,jackrzhang/zulip,swinghu/zulip,wavelets/zulip,susansls/zulip,thomasboyt/zulip,hj3938/zulip,Jianchun1/zulip,codeKonami/zulip,hengqujushi/zulip,RobotCaleb/zulip,bowlofstew/zulip,Frouk/zulip,synicalsyntax/zulip,Cheppers/zulip,huangkebo/zulip,vabs22/zulip,zulip/zulip,hackerkid/zulip,ipernet/zulip,cosmicAsymmetry/zulip,isht3/zulip,dnmfarrell/zulip,reyha/zulip,joshisa/zulip,MariaFaBella85/zulip,PhilSk/zulip,willingc/zulip,ericzhou2008/zulip,DazWorrall/zulip,lfranchi/zulip,Galexrt/zulip,karamcnair/zulip,ryanbackman/zulip,schatt/zulip,johnny9/zulip,tdr130/zulip,mahim97/zulip,moria/zulip,brainwane/zulip,ApsOps/zulip,dnmfarrell/zulip,ikasumiwt/zulip,aakash-cr7/zulip,zofuthan/zulip,luyifan/zulip,verma-varsha/zulip,he15his/zulip,themass/zulip,atomic-labs/zulip,saitodisse/zulip,tiansiyuan/zulip,tbutter/zulip,technicalpickles/zulip,umkay/zulip,tiansiyuan/zulip,ahmadassaf/zulip,Frouk/zulip,dhcrzf/zulip,Vallher/zulip,jerryge/zulip,aps-sids/zulip,luyifan/zulip,Drooids/zulip,udxxabp/zulip,armooo/zulip,DazWorrall/zulip,kokoar/zulip,seapasulli/zulip,bssrdf/zulip,natanovia/zulip,so0k/zulip,bssrdf/zulip,JPJPJPOPOP/zulip,rishig/zulip,KJin99/zulip,Jianchun1/zulip,Gabriel0402/zulip,dxq-git/zulip,gkotian/zulip,samatdav/zulip,so0k/zulip,Galexrt/zulip,avastu/zulip,codeKonami/zulip,bitemyapp/zulip,bastianh/zulip,avastu/zulip,hafeez3000/zulip,brockwhittaker/zulip,wdaher/zulip,esander91/zulip,zulip/zulip,esander91/zulip,levixie/zulip,shaunstanislaus/zulip,krtkmj/zulip,jessedhillon/zulip,pradiptad/zulip,jonesgithub/zulip,vikas-parashar/zulip,jackrzhang/zulip,glovebx/zulip,moria/zulip,akuseru/zulip,guiquanz/zulip,Qgap/zulip,calvinleenyc/zulip,ipernet/zulip,willingc/zulip,SmartPeople/zulip,deer-hope/zulip,thomasboyt/zulip,jainayush975/zulip,tiansiyuan/zulip,gigawhitlocks/zulip,m1ssou/zulip,shrikrishnaholla/zulip,ryansnowboarder/zul
ip,ericzhou2008/zulip,krtkmj/zulip,mohsenSy/zulip,mansilladev/zulip,jainayush975/zulip,Drooids/zulip,yuvipanda/zulip,hustlzp/zulip,bitemyapp/zulip,dawran6/zulip,rishig/zulip,zofuthan/zulip,andersk/zulip,rishig/zulip,KJin99/zulip,bssrdf/zulip,tbutter/zulip,ApsOps/zulip,christi3k/zulip,noroot/zulip,armooo/zulip,bitemyapp/zulip,swinghu/zulip,ipernet/zulip,babbage/zulip,johnnygaddarr/zulip,zachallaun/zulip,verma-varsha/zulip,gkotian/zulip,gigawhitlocks/zulip,souravbadami/zulip,zwily/zulip,thomasboyt/zulip,noroot/zulip,sharmaeklavya2/zulip,guiquanz/zulip,stamhe/zulip,joyhchen/zulip,littledogboy/zulip,so0k/zulip,dhcrzf/zulip,Diptanshu8/zulip,mansilladev/zulip,isht3/zulip,ApsOps/zulip,gkotian/zulip,technicalpickles/zulip,AZtheAsian/zulip,guiquanz/zulip,EasonYi/zulip,mdavid/zulip,mdavid/zulip,shrikrishnaholla/zulip,EasonYi/zulip,m1ssou/zulip,aakash-cr7/zulip,itnihao/zulip,JanzTam/zulip,andersk/zulip,avastu/zulip,ipernet/zulip,ericzhou2008/zulip,peguin40/zulip,hayderimran7/zulip,verma-varsha/zulip,dawran6/zulip,kaiyuanheshang/zulip,Jianchun1/zulip,LeeRisk/zulip,ashwinirudrappa/zulip,TigorC/zulip,umkay/zulip,dwrpayne/zulip,dwrpayne/zulip,jrowan/zulip,Batterfii/zulip,joshisa/zulip,Qgap/zulip,timabbott/zulip,umkay/zulip,PaulPetring/zulip,noroot/zulip,Vallher/zulip,proliming/zulip,johnnygaddarr/zulip,vaidap/zulip,wdaher/zulip,developerfm/zulip,shaunstanislaus/zulip,gkotian/zulip,hengqujushi/zulip,Cheppers/zulip,joshisa/zulip,alliejones/zulip,showell/zulip,zwily/zulip,dxq-git/zulip,yuvipanda/zulip,ryanbackman/zulip,itnihao/zulip,dawran6/zulip,amanharitsh123/zulip,babbage/zulip,paxapy/zulip,verma-varsha/zulip,sonali0901/zulip,themass/zulip,natanovia/zulip,Qgap/zulip,LAndreas/zulip,christi3k/zulip,yocome/zulip,Galexrt/zulip,babbage/zulip,RobotCaleb/zulip,qq1012803704/zulip,levixie/zulip,bssrdf/zulip,MariaFaBella85/zulip,noroot/zulip,akuseru/zulip,EasonYi/zulip,jerryge/zulip,saitodisse/zulip,fw1121/zulip,timabbott/zulip,zwily/zulip,isht3/zulip,proliming/zulip,Suninus/zulip,easyfmxu/zulip,Drooids/zulip,amanharitsh123/zulip,Suninus/zulip,praveenaki/zulip,pradiptad/zulip,Cheppers/zulip,brockwhittaker/zulip,joyhchen/zulip,timabbott/zulip,tommyip/zulip,sonali0901/zulip,wdaher/zulip,gigawhitlocks/zulip,xuanhan863/zulip,peiwei/zulip,ufosky-server/zulip,hafeez3000/zulip,karamcnair/zulip,guiquanz/zulip,krtkmj/zulip,ryanbackman/zulip,AZtheAsian/zulip,rht/zulip,SmartPeople/zulip,hackerkid/zulip,amanharitsh123/zulip,sonali0901/zulip,jimmy54/zulip,hj3938/zulip,alliejones/zulip,avastu/zulip,kaiyuanheshang/zulip,vakila/zulip,firstblade/zulip,hayderimran7/zulip,seapasulli/zulip,ryansnowboarder/zulip,suxinde2009/zulip,hengqujushi/zulip,jeffcao/zulip,brainwane/zulip,MayB/zulip,udxxabp/zulip,nicholasbs/zulip,dawran6/zulip,jessedhillon/zulip,JPJPJPOPOP/zulip,Diptanshu8/zulip,tommyip/zulip,proliming/zulip,paxapy/zulip,susansls/zulip,andersk/zulip,suxinde2009/zulip,kaiyuanheshang/zulip,yocome/zulip,timabbott/zulip,hayderimran7/zulip,synicalsyntax/zulip,babbage/zulip,adnanh/zulip,Diptanshu8/zulip,alliejones/zulip,gigawhitlocks/zulip,themass/zulip,peguin40/zulip,bowlofstew/zulip,hustlzp/zulip,arpitpanwar/zulip,xuxiao/zulip,itnihao/zulip,schatt/zulip,mohsenSy/zulip,brockwhittaker/zulip,jrowan/zulip,jeffcao/zulip,rishig/zulip,themass/zulip,cosmicAsymmetry/zulip,brockwhittaker/zulip,jphilipsen05/zulip,zulip/zulip,vakila/zulip,fw1121/zulip,noroot/zulip,kokoar/zulip,zwily/zulip,dhcrzf/zulip,aliceriot/zulip,ufosky-server/zulip,ufosky-server/zulip,littledogboy/zulip,hafeez3000/zulip,LAndreas/zulip,hackerkid/zulip,he15his/zulip,deer-hope/
zulip,arpitpanwar/zulip,jimmy54/zulip,yocome/zulip,esander91/zulip,nicholasbs/zulip,akuseru/zulip,swinghu/zulip,aliceriot/zulip,MariaFaBella85/zulip,deer-hope/zulip,Juanvulcano/zulip,wweiradio/zulip,wangdeshui/zulip,vaidap/zulip,mansilladev/zulip,calvinleenyc/zulip,johnnygaddarr/zulip,seapasulli/zulip,alliejones/zulip,PhilSk/zulip,avastu/zulip,hayderimran7/zulip,ikasumiwt/zulip,zulip/zulip,JanzTam/zulip,huangkebo/zulip,swinghu/zulip,easyfmxu/zulip,bastianh/zulip,MayB/zulip,Frouk/zulip,he15his/zulip,vikas-parashar/zulip,themass/zulip,kokoar/zulip,DazWorrall/zulip,andersk/zulip,Diptanshu8/zulip,jeffcao/zulip,joshisa/zulip,moria/zulip,qq1012803704/zulip,showell/zulip,zorojean/zulip,dawran6/zulip,LAndreas/zulip,KJin99/zulip,bitemyapp/zulip,huangkebo/zulip,sharmaeklavya2/zulip,he15his/zulip,wavelets/zulip,Suninus/zulip,alliejones/zulip,kokoar/zulip,dattatreya303/zulip,PaulPetring/zulip,glovebx/zulip,MayB/zulip,brockwhittaker/zulip,itnihao/zulip,bitemyapp/zulip,PaulPetring/zulip,lfranchi/zulip,j831/zulip,TigorC/zulip,umkay/zulip,esander91/zulip,Vallher/zulip,shubhamdhama/zulip,DazWorrall/zulip,ApsOps/zulip,fw1121/zulip,m1ssou/zulip,codeKonami/zulip,Cheppers/zulip,developerfm/zulip,Gabriel0402/zulip,dotcool/zulip,PaulPetring/zulip,pradiptad/zulip,saitodisse/zulip,moria/zulip,MariaFaBella85/zulip,jerryge/zulip,umkay/zulip,seapasulli/zulip,suxinde2009/zulip,dnmfarrell/zulip,mahim97/zulip,tdr130/zulip,DazWorrall/zulip,ikasumiwt/zulip,aliceriot/zulip,schatt/zulip,LAndreas/zulip,LeeRisk/zulip,PhilSk/zulip,tdr130/zulip,voidException/zulip,adnanh/zulip,johnnygaddarr/zulip,luyifan/zulip,xuanhan863/zulip,jeffcao/zulip,AZtheAsian/zulip,amallia/zulip,hj3938/zulip,MariaFaBella85/zulip,bitemyapp/zulip,RobotCaleb/zulip,reyha/zulip,kokoar/zulip,hayderimran7/zulip,Galexrt/zulip,fw1121/zulip,sup95/zulip,tommyip/zulip,suxinde2009/zulip,praveenaki/zulip,wdaher/zulip,kou/zulip,bssrdf/zulip,samatdav/zulip,armooo/zulip,johnny9/zulip,hengqujushi/zulip,adnanh/zulip,KingxBanana/zulip,Batterfii/zulip,amallia/zulip,hackerkid/zulip,karamcnair/zulip,armooo/zulip,natanovia/zulip,AZtheAsian/zulip,shubhamdhama/zulip,jonesgithub/zulip,isht3/zulip,dxq-git/zulip,willingc/zulip,jeffcao/zulip
|
Add a management command to set realm filters.
(imported from commit 6590d7eb58ea6e6afc15104dd75ad56b832858fa)
|
from __future__ import absolute_import
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, all_realm_filters, Realm
import sys
class Command(BaseCommand):
help = """Create a realm for the specified domain.
Usage: python manage.py realm_filters foo.com PATTERN URLPATTERN
Example: python manage.py realm_filters --realm=zulip.com --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: python manage.py realm_filters --realm=zulip.com --op=remove '#(?P<id>[0-9]{2,8})'
Example: python manage.py realm_filters --realm=zulip.com --op=show
"""
option_list = BaseCommand.option_list + (
make_option('-r', '--realm',
dest='domain',
type='str',
help='The name of the realm to adjust filters for.'),
make_option('--op',
dest='op',
type='str',
default="show",
help='What operation to do (add, show, remove).'),
)
def handle(self, *args, **options):
if "domain" not in options:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
realm = Realm.objects.get(domain=options["domain"])
if options["op"] == "show":
print "%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, ""))
sys.exit(0)
if not args:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
pattern = args[0]
if options["op"] == "add":
url_format_string = args[1]
RealmFilter(realm=realm, pattern=pattern,
url_format_string=url_format_string).save()
sys.exit(0)
elif options["op"] == "remove":
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
sys.exit(0)
else:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
|
<commit_before><commit_msg>Add a management command to set realm filters.
(imported from commit 6590d7eb58ea6e6afc15104dd75ad56b832858fa)<commit_after>
|
from __future__ import absolute_import
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, all_realm_filters, Realm
import sys
class Command(BaseCommand):
help = """Create a realm for the specified domain.
Usage: python manage.py realm_filters foo.com PATTERN URLPATTERN
Example: python manage.py realm_filters --realm=zulip.com --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: python manage.py realm_filters --realm=zulip.com --op=remove '#(?P<id>[0-9]{2,8})'
Example: python manage.py realm_filters --realm=zulip.com --op=show
"""
option_list = BaseCommand.option_list + (
make_option('-r', '--realm',
dest='domain',
type='str',
help='The name of the realm to adjust filters for.'),
make_option('--op',
dest='op',
type='str',
default="show",
help='What operation to do (add, show, remove).'),
)
def handle(self, *args, **options):
if "domain" not in options:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
realm = Realm.objects.get(domain=options["domain"])
if options["op"] == "show":
print "%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, ""))
sys.exit(0)
if not args:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
pattern = args[0]
if options["op"] == "add":
url_format_string = args[1]
RealmFilter(realm=realm, pattern=pattern,
url_format_string=url_format_string).save()
sys.exit(0)
elif options["op"] == "remove":
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
sys.exit(0)
else:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
|
Add a management command to set realm filters.
(imported from commit 6590d7eb58ea6e6afc15104dd75ad56b832858fa)from __future__ import absolute_import
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, all_realm_filters, Realm
import sys
class Command(BaseCommand):
help = """Create a realm for the specified domain.
Usage: python manage.py realm_filters foo.com PATTERN URLPATTERN
Example: python manage.py realm_filters --realm=zulip.com --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: python manage.py realm_filters --realm=zulip.com --op=remove '#(?P<id>[0-9]{2,8})'
Example: python manage.py realm_filters --realm=zulip.com --op=show
"""
option_list = BaseCommand.option_list + (
make_option('-r', '--realm',
dest='domain',
type='str',
help='The name of the realm to adjust filters for.'),
make_option('--op',
dest='op',
type='str',
default="show",
help='What operation to do (add, show, remove).'),
)
def handle(self, *args, **options):
if "domain" not in options:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
realm = Realm.objects.get(domain=options["domain"])
if options["op"] == "show":
print "%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, ""))
sys.exit(0)
if not args:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
pattern = args[0]
if options["op"] == "add":
url_format_string = args[1]
RealmFilter(realm=realm, pattern=pattern,
url_format_string=url_format_string).save()
sys.exit(0)
elif options["op"] == "remove":
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
sys.exit(0)
else:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
|
<commit_before><commit_msg>Add a management command to set realm filters.
(imported from commit 6590d7eb58ea6e6afc15104dd75ad56b832858fa)<commit_after>from __future__ import absolute_import
from optparse import make_option
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, all_realm_filters, Realm
import sys
class Command(BaseCommand):
help = """Create a realm for the specified domain.
Usage: python manage.py realm_filters foo.com PATTERN URLPATTERN
Example: python manage.py realm_filters --realm=zulip.com --op=add '#(?P<id>[0-9]{2,8})' 'https://trac.humbughq.com/ticket/%(id)s'
Example: python manage.py realm_filters --realm=zulip.com --op=remove '#(?P<id>[0-9]{2,8})'
Example: python manage.py realm_filters --realm=zulip.com --op=show
"""
option_list = BaseCommand.option_list + (
make_option('-r', '--realm',
dest='domain',
type='str',
help='The name of the realm to adjust filters for.'),
make_option('--op',
dest='op',
type='str',
default="show",
help='What operation to do (add, show, remove).'),
)
def handle(self, *args, **options):
if "domain" not in options:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
realm = Realm.objects.get(domain=options["domain"])
if options["op"] == "show":
print "%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, ""))
sys.exit(0)
if not args:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
pattern = args[0]
if options["op"] == "add":
url_format_string = args[1]
RealmFilter(realm=realm, pattern=pattern,
url_format_string=url_format_string).save()
sys.exit(0)
elif options["op"] == "remove":
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
sys.exit(0)
else:
self.print_help("python manage.py", "realm_filters")
sys.exit(1)
|
|
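An aside on the realm_filters record above: because the command is driven entirely by optparse options, it can be smoke-tested through Django's call_command. The sketch below is hypothetical; it assumes a test database containing a Realm with domain "zulip.com" and an otherwise empty RealmFilter table. Note that call_command keyword arguments must use the option dest ("domain"), not the flag name ("realm"), and that the command finishes with sys.exit() even on success, so SystemExit has to be swallowed.
from django.core.management import call_command
from zerver.models import RealmFilter, Realm
def run_filters(*args, **opts):
    try:
        call_command("realm_filters", *args, **opts)
    except SystemExit:
        pass  # the command exits via sys.exit() even on success
realm = Realm.objects.get(domain="zulip.com")
run_filters("#(?P<id>[0-9]{2,8})",
            "https://trac.humbughq.com/ticket/%(id)s",
            domain="zulip.com", op="add")
assert RealmFilter.objects.filter(realm=realm).count() == 1
run_filters("#(?P<id>[0-9]{2,8})", domain="zulip.com", op="remove")
assert RealmFilter.objects.filter(realm=realm).count() == 0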
075f0342a1b03882af8e6afbe4c4debeb1c35093
|
graph/plot-char.py
|
graph/plot-char.py
|
from bluepy.btle import *
from binascii import *
import matplotlib.pyplot as plt
import numpy as np
DEVICE_UUID = '00:0B:57:0B:F6:F6'
x = np.arange(100)
y = np.ones(100) * 560
plt.ion()
fig = plt.figure()
ax = fig.add_subplot(111)
h1, = ax.plot(x, y, 'r-')
def updateLine(h1, newData):
global y
print newData
# h1.set_xdata(numpy.append(h1.get_xdata(), newData))
y = np.append(y, newData)
y = y[1:]
h1.set_ydata(y)
#ax.relim()
# ax.set_ylim(500, 1000)
ax.set_ylim(600, 3300)
# ax.set_ylim(y.mean() - np.std(y) * 5, y.mean() + np.std(y) * 5)
# ax.set_ylim(y.mean() - y.mean()/2, y.mean() + y.mean()/2)
ax.autoscale_view()
fig.canvas.draw()
fig.canvas.flush_events()
#plt.draw()
p = Peripheral(DEVICE_UUID)
chars = p.getCharacteristics()
while(True):
val = chars[5].read()
ival = int(hexlify(val[1]+val[0]), 16)
updateLine(h1, ival)
p.disconnect()
#x = np.linspace(0, 6*np.pi, 100)
#y = np.sin(x)
#
## You probably won't need this if you're embedding things in a tkinter plot...
#plt.ion()
#
#fig = plt.figure()
#ax = fig.add_subplot(111)
#line1, = ax.plot(x, y, 'r-') # Returns a tuple of line objects, thus the comma
#
#for phase in np.linspace(0, 10*np.pi, 500):
# line1.set_ydata(np.sin(x + phase))
# fig.canvas.draw()#
|
Add python code for graphing
|
Add python code for graphing
|
Python
|
apache-2.0
|
Miceuz/ScaleHack,Miceuz/ScaleHack
|
Add python code for graphing
|
from bluepy.btle import *
from binascii import *
import matplotlib.pyplot as plt
import numpy as np
DEVICE_UUID = '00:0B:57:0B:F6:F6'
x = np.arange(100)
y = np.ones(100) * 560
plt.ion()
fig = plt.figure()
ax = fig.add_subplot(111)
h1, = ax.plot(x, y, 'r-')
def updateLine(h1, newData):
global y
print newData
# h1.set_xdata(numpy.append(h1.get_xdata(), newData))
y = np.append(y, newData)
y = y[1:]
h1.set_ydata(y)
#ax.relim()
# ax.set_ylim(500, 1000)
ax.set_ylim(600, 3300)
# ax.set_ylim(y.mean() - np.std(y) * 5, y.mean() + np.std(y) * 5)
# ax.set_ylim(y.mean() - y.mean()/2, y.mean() + y.mean()/2)
ax.autoscale_view()
fig.canvas.draw()
fig.canvas.flush_events()
#plt.draw()
p = Peripheral(DEVICE_UUID)
chars = p.getCharacteristics()
while(True):
val = chars[5].read()
ival = int(hexlify(val[1]+val[0]), 16)
updateLine(h1, ival)
p.disconnect()
#x = np.linspace(0, 6*np.pi, 100)
#y = np.sin(x)
#
## You probably won't need this if you're embedding things in a tkinter plot...
#plt.ion()
#
#fig = plt.figure()
#ax = fig.add_subplot(111)
#line1, = ax.plot(x, y, 'r-') # Returns a tuple of line objects, thus the comma
#
#for phase in np.linspace(0, 10*np.pi, 500):
# line1.set_ydata(np.sin(x + phase))
# fig.canvas.draw()#
|
<commit_before><commit_msg>Add python code for graphing<commit_after>
|
from bluepy.btle import *
from binascii import *
import matplotlib.pyplot as plt
import numpy as np
DEVICE_UUID = '00:0B:57:0B:F6:F6'
x = np.arange(100)
y = np.ones(100) * 560
plt.ion()
fig = plt.figure()
ax = fig.add_subplot(111)
h1, = ax.plot(x, y, 'r-')
def updateLine(h1, newData):
global y
print newData
# h1.set_xdata(numpy.append(h1.get_xdata(), newData))
y = np.append(y, newData)
y = y[1:]
h1.set_ydata(y)
#ax.relim()
# ax.set_ylim(500, 1000)
ax.set_ylim(600, 3300)
# ax.set_ylim(y.mean() - np.std(y) * 5, y.mean() + np.std(y) * 5)
# ax.set_ylim(y.mean() - y.mean()/2, y.mean() + y.mean()/2)
ax.autoscale_view()
fig.canvas.draw()
fig.canvas.flush_events()
#plt.draw()
p = Peripheral(DEVICE_UUID)
chars = p.getCharacteristics()
while(True):
val = chars[5].read()
ival = int(hexlify(val[1]+val[0]), 16)
updateLine(h1, ival)
p.disconnect()
#x = np.linspace(0, 6*np.pi, 100)
#y = np.sin(x)
#
## You probably won't need this if you're embedding things in a tkinter plot...
#plt.ion()
#
#fig = plt.figure()
#ax = fig.add_subplot(111)
#line1, = ax.plot(x, y, 'r-') # Returns a tuple of line objects, thus the comma
#
#for phase in np.linspace(0, 10*np.pi, 500):
# line1.set_ydata(np.sin(x + phase))
# fig.canvas.draw()#
|
Add python code for graphingfrom bluepy.btle import *
from binascii import *
import matplotlib.pyplot as plt
import numpy as np
DEVICE_UUID = '00:0B:57:0B:F6:F6'
x = np.arange(100)
y = np.ones(100) * 560
plt.ion()
fig = plt.figure()
ax = fig.add_subplot(111)
h1, = ax.plot(x, y, 'r-')
def updateLine(h1, newData):
global y
print newData
# h1.set_xdata(numpy.append(h1.get_xdata(), newData))
y = np.append(y, newData)
y = y[1:]
h1.set_ydata(y)
#ax.relim()
# ax.set_ylim(500, 1000)
ax.set_ylim(600, 3300)
# ax.set_ylim(y.mean() - np.std(y) * 5, y.mean() + np.std(y) * 5)
# ax.set_ylim(y.mean() - y.mean()/2, y.mean() + y.mean()/2)
ax.autoscale_view()
fig.canvas.draw()
fig.canvas.flush_events()
#plt.draw()
p = Peripheral(DEVICE_UUID)
chars = p.getCharacteristics()
while(True):
val = chars[5].read()
ival = int(hexlify(val[1]+val[0]), 16)
updateLine(h1, ival)
p.disconnect()
#x = np.linspace(0, 6*np.pi, 100)
#y = np.sin(x)
#
## You probably won't need this if you're embedding things in a tkinter plot...
#plt.ion()
#
#fig = plt.figure()
#ax = fig.add_subplot(111)
#line1, = ax.plot(x, y, 'r-') # Returns a tuple of line objects, thus the comma
#
#for phase in np.linspace(0, 10*np.pi, 500):
# line1.set_ydata(np.sin(x + phase))
# fig.canvas.draw()#
|
<commit_before><commit_msg>Add python code for graphing<commit_after>from bluepy.btle import *
from binascii import *
import matplotlib.pyplot as plt
import numpy as np
DEVICE_UUID = '00:0B:57:0B:F6:F6'
x = np.arange(100)
y = np.ones(100) * 560
plt.ion()
fig = plt.figure()
ax = fig.add_subplot(111)
h1, = ax.plot(x, y, 'r-')
def updateLine(h1, newData):
global y
print newData
# h1.set_xdata(numpy.append(h1.get_xdata(), newData))
y = np.append(y, newData)
y = y[1:]
h1.set_ydata(y)
#ax.relim()
# ax.set_ylim(500, 1000)
ax.set_ylim(600, 3300)
# ax.set_ylim(y.mean() - np.std(y) * 5, y.mean() + np.std(y) * 5)
# ax.set_ylim(y.mean() - y.mean()/2, y.mean() + y.mean()/2)
ax.autoscale_view()
fig.canvas.draw()
fig.canvas.flush_events()
#plt.draw()
p = Peripheral(DEVICE_UUID)
chars = p.getCharacteristics()
while(True):
val = chars[5].read()
ival = int(hexlify(val[1]+val[0]), 16)
updateLine(h1, ival)
p.disconnect()
#x = np.linspace(0, 6*np.pi, 100)
#y = np.sin(x)
#
## You probably won't need this if you're embedding things in a tkinter plot...
#plt.ion()
#
#fig = plt.figure()
#ax = fig.add_subplot(111)
#line1, = ax.plot(x, y, 'r-') # Returns a tuple of line objects, thus the comma
#
#for phase in np.linspace(0, 10*np.pi, 500):
# line1.set_ydata(np.sin(x + phase))
# fig.canvas.draw()#
|
|
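A small note on the plotting record above: the two bytes read from the BLE characteristic are decoded by swapping them as strings and round-tripping through hexlify. The struct module expresses the same little-endian 16-bit decode more directly. This stand-alone sketch assumes only that the characteristic value is at least two bytes, as in the script:
import struct
def decode_reading(val):
    # '<H' is a little-endian unsigned 16-bit integer, equivalent to
    # int(hexlify(val[1] + val[0]), 16) in the script above.
    return struct.unpack('<H', val[:2])[0]
assert decode_reading(b'\x34\x12') == 0x1234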
cfcba59e0d5cf01f731f6996b42547b7143a3fd4
|
beer_search_v2/signals.py
|
beer_search_v2/signals.py
|
from django.db import IntegrityError
from random import sample
from beer_search_v2.models import Country, Brewery, SimplifiedStyle, ProductType, Product
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils.text import slugify
from markdown import markdown
from datetime import date
@receiver(pre_save, sender=Country)
def capitalize_country_name(sender, instance, **kwargs):
    instance.name = instance.name[0].upper() + instance.name[1:]
@receiver(pre_save, sender=Brewery)
def initialize_brewery_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=SimplifiedStyle)
def update_simplifiedstyle_examples(sender, instance, **kwargs):
# Finding all product types associated with a particular simplified style
product_types = set()
untappd_styles = instance.untappdstyle_set.all()
for untappd_style in untappd_styles:
for entity in untappd_style.untappdentity_set.all():
for product_type in entity.producttype_set.filter(available=True).all():
product_types.add(product_type)
# Randomly picking five of those product types
sample_size = min(5, len(product_types))
examples = sample(product_types, sample_size)
instance.examples.clear()
for example in examples:
instance.examples.add(example)
instance.slug = slugify(instance.name)
instance.html_description = markdown(instance.description)
@receiver(pre_save, sender=ProductType)
def initialize_producttype_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=ProductType)
def initialize_producttype_image(sender, instance, **kwargs):
if not instance.main_image:
instance.update_image_url()
@receiver(pre_save, sender=Product)
def clean_product(sender, instance, **kwargs):
instance.name = instance.name.strip() # For everyone's sanity
instance.updated_at = date.today() # Automatic updates
if not instance.image_url:
instance.attempt_image_fetch()
@receiver(post_save, sender=Product)
def validate_product(sender, instance, created, **kwargs):
# ToDo: Test this logic
max_duplicates_allowed = 1 # Products should not share non-falsey identifiers
if instance.atvr_id:
if Product.objects.filter(atvr_id=instance.atvr_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given ÁTVR ID already exists")
if instance.jog_id:
if Product.objects.filter(jog_id=instance.jog_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given Járn og Gler ID already exists")
|
Allow for the post_save signal already knowing of the object at hand
|
Allow for the post_save signal already knowing of the object at hand
|
Python
|
mit
|
Ernir/bjorleitin,Ernir/bjorleitin,Ernir/bjorleitin,Ernir/bjorleitin
|
Allow for the post_save signal already knowing of the object at hand
|
from django.db import IntegrityError
from random import sample
from beer_search_v2.models import Country, Brewery, SimplifiedStyle, ProductType, Product
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils.text import slugify
from markdown import markdown
from datetime import date
@receiver(pre_save, sender=Country)
def capitalize_country_name(sender, instance, **kwargs):
    instance.name = instance.name[0].upper() + instance.name[1:]
@receiver(pre_save, sender=Brewery)
def initialize_brewery_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=SimplifiedStyle)
def update_simplifiedstyle_examples(sender, instance, **kwargs):
# Finding all product types associated with a particular simplified style
product_types = set()
untappd_styles = instance.untappdstyle_set.all()
for untappd_style in untappd_styles:
for entity in untappd_style.untappdentity_set.all():
for product_type in entity.producttype_set.filter(available=True).all():
product_types.add(product_type)
# Randomly picking five of those product types
sample_size = min(5, len(product_types))
examples = sample(product_types, sample_size)
instance.examples.clear()
for example in examples:
instance.examples.add(example)
instance.slug = slugify(instance.name)
instance.html_description = markdown(instance.description)
@receiver(pre_save, sender=ProductType)
def initialize_producttype_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=ProductType)
def initialize_producttype_image(sender, instance, **kwargs):
if not instance.main_image:
instance.update_image_url()
@receiver(pre_save, sender=Product)
def clean_product(sender, instance, **kwargs):
instance.name = instance.name.strip() # For everyone's sanity
instance.updated_at = date.today() # Automatic updates
if not instance.image_url:
instance.attempt_image_fetch()
@receiver(post_save, sender=Product)
def validate_product(sender, instance, created, **kwargs):
# ToDo: Test this logic
max_duplicates_allowed = 1 # Products should not share non-falsey identifiers
if instance.atvr_id:
if Product.objects.filter(atvr_id=instance.atvr_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given ÁTVR ID already exists")
if instance.jog_id:
if Product.objects.filter(jog_id=instance.jog_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given Járn og Gler ID already exists")
|
<commit_before><commit_msg>Allow for the post_save signal already knowing of the object at hand<commit_after>
|
from django.db import IntegrityError
from random import sample
from beer_search_v2.models import Country, Brewery, SimplifiedStyle, ProductType, Product
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils.text import slugify
from markdown import markdown
from datetime import date
@receiver(pre_save, sender=Country)
def capitalize_country_name(sender, instance, **kwargs):
    instance.name = instance.name[0].upper() + instance.name[1:]
@receiver(pre_save, sender=Brewery)
def initialize_brewery_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=SimplifiedStyle)
def update_simplifiedstyle_examples(sender, instance, **kwargs):
# Finding all product types associated with a particular simplified style
product_types = set()
untappd_styles = instance.untappdstyle_set.all()
for untappd_style in untappd_styles:
for entity in untappd_style.untappdentity_set.all():
for product_type in entity.producttype_set.filter(available=True).all():
product_types.add(product_type)
# Randomly picking five of those product types
sample_size = min(5, len(product_types))
examples = sample(product_types, sample_size)
instance.examples.clear()
for example in examples:
instance.examples.add(example)
instance.slug = slugify(instance.name)
instance.html_description = markdown(instance.description)
@receiver(pre_save, sender=ProductType)
def initialize_producttype_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=ProductType)
def initialize_producttype_image(sender, instance, **kwargs):
if not instance.main_image:
instance.update_image_url()
@receiver(pre_save, sender=Product)
def clean_product(sender, instance, **kwargs):
instance.name = instance.name.strip() # For everyone's sanity
instance.updated_at = date.today() # Automatic updates
if not instance.image_url:
instance.attempt_image_fetch()
@receiver(post_save, sender=Product)
def validate_product(sender, instance, created, **kwargs):
# ToDo: Test this logic
max_duplicates_allowed = 1 # Products should not share non-falsey identifiers
if instance.atvr_id:
if Product.objects.filter(atvr_id=instance.atvr_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given ÁTVR ID already exists")
if instance.jog_id:
if Product.objects.filter(jog_id=instance.jog_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given Járn og Gler ID already exists")
|
Allow for the post_save signal already knowing of the object at handfrom django.db import IntegrityError
from random import sample
from beer_search_v2.models import Country, Brewery, SimplifiedStyle, ProductType, Product
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils.text import slugify
from markdown import markdown
from datetime import date
@receiver(pre_save, sender=Country)
def capitalize_country_name(sender, instance, **kwargs):
    instance.name = instance.name[0].upper() + instance.name[1:]
@receiver(pre_save, sender=Brewery)
def initialize_brewery_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=SimplifiedStyle)
def update_simplifiedstyle_examples(sender, instance, **kwargs):
# Finding all product types associated with a particular simplified style
product_types = set()
untappd_styles = instance.untappdstyle_set.all()
for untappd_style in untappd_styles:
for entity in untappd_style.untappdentity_set.all():
for product_type in entity.producttype_set.filter(available=True).all():
product_types.add(product_type)
# Randomly picking five of those product types
sample_size = min(5, len(product_types))
examples = sample(product_types, sample_size)
instance.examples.clear()
for example in examples:
instance.examples.add(example)
instance.slug = slugify(instance.name)
instance.html_description = markdown(instance.description)
@receiver(pre_save, sender=ProductType)
def initialize_producttype_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=ProductType)
def initialize_producttype_image(sender, instance, **kwargs):
if not instance.main_image:
instance.update_image_url()
@receiver(pre_save, sender=Product)
def clean_product(sender, instance, **kwargs):
instance.name = instance.name.strip() # For everyone's sanity
instance.updated_at = date.today() # Automatic updates
if not instance.image_url:
instance.attempt_image_fetch()
@receiver(post_save, sender=Product)
def validate_product(sender, instance, created, **kwargs):
# ToDo: Test this logic
max_duplicates_allowed = 1 # Products should not share non-falsey identifiers
if instance.atvr_id:
if Product.objects.filter(atvr_id=instance.atvr_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given ÁTVR ID already exists")
if instance.jog_id:
if Product.objects.filter(jog_id=instance.jog_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given Járn og Gler ID already exists")
|
<commit_before><commit_msg>Allow for the post_save signal already knowing of the object at hand<commit_after>from django.db import IntegrityError
from random import sample
from beer_search_v2.models import Country, Brewery, SimplifiedStyle, ProductType, Product
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils.text import slugify
from markdown import markdown
from datetime import date
@receiver(pre_save, sender=Country)
def capitalize_country_name(sender, instance, **kwargs):
    instance.name = instance.name[0].upper() + instance.name[1:]
@receiver(pre_save, sender=Brewery)
def initialize_brewery_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=SimplifiedStyle)
def update_simplifiedstyle_examples(sender, instance, **kwargs):
# Finding all product types associated with a particular simplified style
product_types = set()
untappd_styles = instance.untappdstyle_set.all()
for untappd_style in untappd_styles:
for entity in untappd_style.untappdentity_set.all():
for product_type in entity.producttype_set.filter(available=True).all():
product_types.add(product_type)
# Randomly picking five of those product types
sample_size = min(5, len(product_types))
examples = sample(product_types, sample_size)
instance.examples.clear()
for example in examples:
instance.examples.add(example)
instance.slug = slugify(instance.name)
instance.html_description = markdown(instance.description)
@receiver(pre_save, sender=ProductType)
def initialize_producttype_alias(sender, instance, **kwargs):
if not instance.alias:
instance.alias = instance.name
@receiver(pre_save, sender=ProductType)
def initialize_producttype_image(sender, instance, **kwargs):
if not instance.main_image:
instance.update_image_url()
@receiver(pre_save, sender=Product)
def clean_product(sender, instance, **kwargs):
instance.name = instance.name.strip() # For everyone's sanity
instance.updated_at = date.today() # Automatic updates
if not instance.image_url:
instance.attempt_image_fetch()
@receiver(post_save, sender=Product)
def validate_product(sender, instance, created, **kwargs):
# ToDo: Test this logic
max_duplicates_allowed = 1 # Products should not share non-falsey identifiers
if instance.atvr_id:
if Product.objects.filter(atvr_id=instance.atvr_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given ÁTVR ID already exists")
if instance.jog_id:
if Product.objects.filter(jog_id=instance.jog_id).count() > max_duplicates_allowed:
raise IntegrityError("Product with given Járn og Gler ID already exists")
|
|
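On the signals record above: the post_save receiver guards against duplicate non-empty identifiers by counting rows and raising IntegrityError, and the module's own ToDo notes the logic is untested. A hypothetical check of that guard follows; the field names come from the module, the values are invented, any other required Product fields are omitted for brevity, and image_url is set so that clean_product skips attempt_image_fetch().
from django.db import IntegrityError
from beer_search_v2.models import Product
Product.objects.create(name="Test IPA", atvr_id="12345",
                       image_url="http://example.com/a.png")
try:
    Product.objects.create(name="Test IPA clone", atvr_id="12345",
                           image_url="http://example.com/b.png")
except IntegrityError:
    pass  # expected: the receiver rejects the duplicate atvr_id
else:
    raise AssertionError("duplicate atvr_id was not rejected")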
b855384437b3f1a392e2d3e19f91e0675bac1885
|
35-dumb-filters/tf-35.py
|
35-dumb-filters/tf-35.py
|
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import plot_model
import numpy as np
import sys, os, string
characters = string.printable
char_indices = dict((c, i) for i, c in enumerate(characters))
indices_char = dict((i, c) for i, c in enumerate(characters))
INPUT_VOCAB_SIZE = len(characters)
LINE_SIZE = 100
def encode_one_hot(line):
x = np.zeros((len(line), INPUT_VOCAB_SIZE))
for i, c in enumerate(line):
if c in characters:
index = char_indices[c]
else:
index = char_indices[' ']
x[i][index] = 1
return x
def decode_one_hot(x):
s = []
for onehot in x:
one_index = np.argmax(onehot)
s.append(indices_char[one_index])
return ''.join(s)
def normalization_layer_set_weights(n_layer):
wb = []
b = np.zeros((INPUT_VOCAB_SIZE), dtype=np.float32)
w = np.zeros((INPUT_VOCAB_SIZE, INPUT_VOCAB_SIZE), dtype=np.float32)
# Let lower case letters go through
for c in string.ascii_lowercase:
i = char_indices[c]
w[i, i] = 1
# Map capitals to lower case
for c in string.ascii_uppercase:
i = char_indices[c]
il = char_indices[c.lower()]
w[i, il] = 1
# Map all non-letters to space
sp_idx = char_indices[' ']
for c in [c for c in list(string.printable) if c not in list(string.ascii_letters)]:
i = char_indices[c]
w[i, sp_idx] = 1
wb.append(w)
wb.append(b)
n_layer.set_weights(wb)
return n_layer
def build_model():
# Normalize characters using a dense layer
model = Sequential()
dense_layer = Dense(INPUT_VOCAB_SIZE, input_shape=(INPUT_VOCAB_SIZE,))
model.add(dense_layer)
normalization_layer_set_weights(dense_layer)
return model
model = build_model()
with open(sys.argv[1]) as f:
for line in f:
if line.isspace(): continue
batch = encode_one_hot(line)
preds = model.predict(batch)
normal = decode_one_hot(preds)
print(normal)
|
Add a version of this that works on single characters, and batches on the line. It's much simpler to explain.
|
Add a version of this that works on single characters, and batches on the line. It's much simpler to explain.
|
Python
|
mit
|
kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,crista/exercises-in-programming-style
|
Add a version of this that works on single characters, and batches on the line. It's much simpler to explain.
|
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import plot_model
import numpy as np
import sys, os, string
characters = string.printable
char_indices = dict((c, i) for i, c in enumerate(characters))
indices_char = dict((i, c) for i, c in enumerate(characters))
INPUT_VOCAB_SIZE = len(characters)
LINE_SIZE = 100
def encode_one_hot(line):
x = np.zeros((len(line), INPUT_VOCAB_SIZE))
for i, c in enumerate(line):
if c in characters:
index = char_indices[c]
else:
index = char_indices[' ']
x[i][index] = 1
return x
def decode_one_hot(x):
s = []
for onehot in x:
one_index = np.argmax(onehot)
s.append(indices_char[one_index])
return ''.join(s)
def normalization_layer_set_weights(n_layer):
wb = []
b = np.zeros((INPUT_VOCAB_SIZE), dtype=np.float32)
w = np.zeros((INPUT_VOCAB_SIZE, INPUT_VOCAB_SIZE), dtype=np.float32)
# Let lower case letters go through
for c in string.ascii_lowercase:
i = char_indices[c]
w[i, i] = 1
# Map capitals to lower case
for c in string.ascii_uppercase:
i = char_indices[c]
il = char_indices[c.lower()]
w[i, il] = 1
# Map all non-letters to space
sp_idx = char_indices[' ']
for c in [c for c in list(string.printable) if c not in list(string.ascii_letters)]:
i = char_indices[c]
w[i, sp_idx] = 1
wb.append(w)
wb.append(b)
n_layer.set_weights(wb)
return n_layer
def build_model():
# Normalize characters using a dense layer
model = Sequential()
dense_layer = Dense(INPUT_VOCAB_SIZE, input_shape=(INPUT_VOCAB_SIZE,))
model.add(dense_layer)
normalization_layer_set_weights(dense_layer)
return model
model = build_model()
with open(sys.argv[1]) as f:
for line in f:
if line.isspace(): continue
batch = encode_one_hot(line)
preds = model.predict(batch)
normal = decode_one_hot(preds)
print(normal)
|
<commit_before><commit_msg>Add a version of this that works on single characters, and batches on the line. It's much simpler to explain.<commit_after>
|
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import plot_model
import numpy as np
import sys, os, string
characters = string.printable
char_indices = dict((c, i) for i, c in enumerate(characters))
indices_char = dict((i, c) for i, c in enumerate(characters))
INPUT_VOCAB_SIZE = len(characters)
LINE_SIZE = 100
def encode_one_hot(line):
x = np.zeros((len(line), INPUT_VOCAB_SIZE))
for i, c in enumerate(line):
if c in characters:
index = char_indices[c]
else:
index = char_indices[' ']
x[i][index] = 1
return x
def decode_one_hot(x):
s = []
for onehot in x:
one_index = np.argmax(onehot)
s.append(indices_char[one_index])
return ''.join(s)
def normalization_layer_set_weights(n_layer):
wb = []
b = np.zeros((INPUT_VOCAB_SIZE), dtype=np.float32)
w = np.zeros((INPUT_VOCAB_SIZE, INPUT_VOCAB_SIZE), dtype=np.float32)
# Let lower case letters go through
for c in string.ascii_lowercase:
i = char_indices[c]
w[i, i] = 1
# Map capitals to lower case
for c in string.ascii_uppercase:
i = char_indices[c]
il = char_indices[c.lower()]
w[i, il] = 1
# Map all non-letters to space
sp_idx = char_indices[' ']
for c in [c for c in list(string.printable) if c not in list(string.ascii_letters)]:
i = char_indices[c]
w[i, sp_idx] = 1
wb.append(w)
wb.append(b)
n_layer.set_weights(wb)
return n_layer
def build_model():
# Normalize characters using a dense layer
model = Sequential()
dense_layer = Dense(INPUT_VOCAB_SIZE, input_shape=(INPUT_VOCAB_SIZE,))
model.add(dense_layer)
normalization_layer_set_weights(dense_layer)
return model
model = build_model()
with open(sys.argv[1]) as f:
for line in f:
if line.isspace(): continue
batch = encode_one_hot(line)
preds = model.predict(batch)
normal = decode_one_hot(preds)
print(normal)
|
Add a version of this that works on single characters, and batches on the line. It's much simpler to explain.from keras.models import Sequential
from keras.layers import Dense
from keras.utils import plot_model
import numpy as np
import sys, os, string
characters = string.printable
char_indices = dict((c, i) for i, c in enumerate(characters))
indices_char = dict((i, c) for i, c in enumerate(characters))
INPUT_VOCAB_SIZE = len(characters)
LINE_SIZE = 100
def encode_one_hot(line):
x = np.zeros((len(line), INPUT_VOCAB_SIZE))
for i, c in enumerate(line):
if c in characters:
index = char_indices[c]
else:
index = char_indices[' ']
x[i][index] = 1
return x
def decode_one_hot(x):
s = []
for onehot in x:
one_index = np.argmax(onehot)
s.append(indices_char[one_index])
return ''.join(s)
def normalization_layer_set_weights(n_layer):
wb = []
b = np.zeros((INPUT_VOCAB_SIZE), dtype=np.float32)
w = np.zeros((INPUT_VOCAB_SIZE, INPUT_VOCAB_SIZE), dtype=np.float32)
# Let lower case letters go through
for c in string.ascii_lowercase:
i = char_indices[c]
w[i, i] = 1
# Map capitals to lower case
for c in string.ascii_uppercase:
i = char_indices[c]
il = char_indices[c.lower()]
w[i, il] = 1
# Map all non-letters to space
sp_idx = char_indices[' ']
for c in [c for c in list(string.printable) if c not in list(string.ascii_letters)]:
i = char_indices[c]
w[i, sp_idx] = 1
wb.append(w)
wb.append(b)
n_layer.set_weights(wb)
return n_layer
def build_model():
# Normalize characters using a dense layer
model = Sequential()
dense_layer = Dense(INPUT_VOCAB_SIZE, input_shape=(INPUT_VOCAB_SIZE,))
model.add(dense_layer)
normalization_layer_set_weights(dense_layer)
return model
model = build_model()
with open(sys.argv[1]) as f:
for line in f:
if line.isspace(): continue
batch = encode_one_hot(line)
preds = model.predict(batch)
normal = decode_one_hot(preds)
print(normal)
|
<commit_before><commit_msg>Add a version of this that works on single characters, and batches on the line. It's much simpler to explain.<commit_after>from keras.models import Sequential
from keras.layers import Dense
from keras.utils import plot_model
import numpy as np
import sys, os, string
characters = string.printable
char_indices = dict((c, i) for i, c in enumerate(characters))
indices_char = dict((i, c) for i, c in enumerate(characters))
INPUT_VOCAB_SIZE = len(characters)
LINE_SIZE = 100
def encode_one_hot(line):
x = np.zeros((len(line), INPUT_VOCAB_SIZE))
for i, c in enumerate(line):
if c in characters:
index = char_indices[c]
else:
index = char_indices[' ']
x[i][index] = 1
return x
def decode_one_hot(x):
s = []
for onehot in x:
one_index = np.argmax(onehot)
s.append(indices_char[one_index])
return ''.join(s)
def normalization_layer_set_weights(n_layer):
wb = []
b = np.zeros((INPUT_VOCAB_SIZE), dtype=np.float32)
w = np.zeros((INPUT_VOCAB_SIZE, INPUT_VOCAB_SIZE), dtype=np.float32)
# Let lower case letters go through
for c in string.ascii_lowercase:
i = char_indices[c]
w[i, i] = 1
# Map capitals to lower case
for c in string.ascii_uppercase:
i = char_indices[c]
il = char_indices[c.lower()]
w[i, il] = 1
# Map all non-letters to space
sp_idx = char_indices[' ']
for c in [c for c in list(string.printable) if c not in list(string.ascii_letters)]:
i = char_indices[c]
w[i, sp_idx] = 1
wb.append(w)
wb.append(b)
n_layer.set_weights(wb)
return n_layer
def build_model():
# Normalize characters using a dense layer
model = Sequential()
dense_layer = Dense(INPUT_VOCAB_SIZE, input_shape=(INPUT_VOCAB_SIZE,))
model.add(dense_layer)
normalization_layer_set_weights(dense_layer)
return model
model = build_model()
with open(sys.argv[1]) as f:
for line in f:
if line.isspace(): continue
batch = encode_one_hot(line)
preds = model.predict(batch)
normal = decode_one_hot(preds)
print(normal)
|
|
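On the Keras record above: the dense layer's weights form a hand-set 0/1 projection matrix (identity on lower-case letters, upper-to-lower mapping, everything else to space), so its outputs are exact one-hots and argmax decoding is deterministic. A quick sanity check, reusing encode_one_hot, decode_one_hot and build_model from the script, with the expected string worked out by hand:
model = build_model()
preds = model.predict(encode_one_hot("Hello, World!"))
# 'H' -> 'h' and 'W' -> 'w'; ',', ' ' and '!' all map to ' '.
assert decode_one_hot(preds) == "hello  world "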
5637909c0115db0bf6d10acecbed99ce2be5cdd2
|
create_vm.py
|
create_vm.py
|
import argparse
import time
import novaclient.v1_1.client as nvclient
parser = argparse.ArgumentParser(description="Script for vm create")
parser.add_argument("-openstack_user", dest='openstack_user', type=str,
help="Openstack username", default='admin')
parser.add_argument("-openstack_password", dest='openstack_password',
type=str, help="Openstack password",
default='111')
parser.add_argument("-openstack_tenant", dest='openstack_tenant', type=str,
help="Openstack tenant", default='admin')
parser.add_argument("-keystone_url", dest='keystone_url', type=str,
help="Keystone url", default='http://localhost:5000/v2.0/')
parser.add_argument("-server_name", dest='server_name', type=str,
help="Server name", default='cimachine')
parser.add_argument("-image_id", dest='image_id', type=str,
help="Image id", default='id')
parser.add_argument("-flavor_id", dest='flavor_id', type=str,
help="Flavor id", default='id')
parser.add_argument("-net_id", dest='net_id', type=str,
help="Network id", default='id')
parser.add_argument("-keypair", dest='keypair', type=str,
help="Keypair name", default='huj')
args = parser.parse_args()
user = args.openstack_user
password = args.openstack_password
tenant = args.openstack_tenant
keystone_url = args.keystone_url
server_name = args.server_name
image_id = args.image_id
flavor_id = args.flavor_id
net_id = args.net_id
keypair = args.keypair
nova = nvclient.Client(user, password, tenant, keystone_url,
service_type="compute")
server = nova.servers.create(server_name, image_id, flavor_id,
key_name=keypair, nics=[{'net-id': net_id}])
start_time = time.time()
while nova.servers.get(server.id).status != 'ACTIVE':
if time.time() - start_time > 250:
raise Exception
time.sleep(1)
|
Create script for boot instance in cloud
|
Create script for boot instance in cloud
|
Python
|
apache-2.0
|
smurashov/test-infra,smurashov/test-infra
|
Create script for boot instance in cloud
|
import argparse
import time
import novaclient.v1_1.client as nvclient
parser = argparse.ArgumentParser(description="Script for vm create")
parser.add_argument("-openstack_user", dest='openstack_user', type=str,
help="Openstack username", default='admin')
parser.add_argument("-openstack_password", dest='openstack_password',
type=str, help="Openstack password",
default='111')
parser.add_argument("-openstack_tenant", dest='openstack_tenant', type=str,
help="Openstack tenant", default='admin')
parser.add_argument("-keystone_url", dest='keystone_url', type=str,
help="Keystone url", default='http://localhost:5000/v2.0/')
parser.add_argument("-server_name", dest='server_name', type=str,
help="Server name", default='cimachine')
parser.add_argument("-image_id", dest='image_id', type=str,
help="Image id", default='id')
parser.add_argument("-flavor_id", dest='flavor_id', type=str,
help="Flavor id", default='id')
parser.add_argument("-net_id", dest='net_id', type=str,
help="Network id", default='id')
parser.add_argument("-keypair", dest='keypair', type=str,
help="Keypair name", default='huj')
args = parser.parse_args()
user = args.openstack_user
password = args.openstack_password
tenant = args.openstack_tenant
keystone_url = args.keystone_url
server_name = args.server_name
image_id = args.image_id
flavor_id = args.flavor_id
net_id = args.net_id
keypair = args.keypair
nova = nvclient.Client(user, password, tenant, keystone_url,
service_type="compute")
server = nova.servers.create(server_name, image_id, flavor_id,
key_name=keypair, nics=[{'net-id': net_id}])
start_time = time.time()
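# Poll until the instance reaches ACTIVE, giving up after 250 seconds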
while nova.servers.get(server.id).status != 'ACTIVE':
if time.time() - start_time > 250:
raise Exception("Instance did not reach ACTIVE state within 250 seconds")
time.sleep(1)
|
<commit_before><commit_msg>Create script to boot an instance in the cloud<commit_after>
|
import argparse
import time
import novaclient.v1_1.client as nvclient
parser = argparse.ArgumentParser(description="Script for vm create")
parser.add_argument("-openstack_user", dest='openstack_user', type=str,
help="Openstack username", default='admin')
parser.add_argument("-openstack_password", dest='openstack_password',
type=str, help="Openstack password",
default='111')
parser.add_argument("-openstack_tenant", dest='openstack_tenant', type=str,
help="Openstack tenant", default='admin')
parser.add_argument("-keystone_url", dest='keystone_url', type=str,
help="Keystone url", default='http://localhost:5000/v2.0/')
parser.add_argument("-server_name", dest='server_name', type=str,
help="Server name", default='cimachine')
parser.add_argument("-image_id", dest='image_id', type=str,
help="Image id", default='id')
parser.add_argument("-flavor_id", dest='flavor_id', type=str,
help="Flavor id", default='id')
parser.add_argument("-net_id", dest='net_id', type=str,
help="Network id", default='id')
parser.add_argument("-keypair", dest='keypair', type=str,
help="Keypair name", default='huj')
args = parser.parse_args()
user = args.openstack_user
password = args.openstack_password
tenant = args.openstack_tenant
keystone_url = args.keystone_url
server_name = args.server_name
image_id = args.image_id
flavor_id = args.flavor_id
net_id = args.net_id
keypair = args.keypair
nova = nvclient.Client(user, password, tenant, keystone_url,
service_type="compute")
server = nova.servers.create(server_name, image_id, flavor_id,
key_name=keypair, nics=[{'net-id': net_id}])
start_time = time.time()
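# Poll until the instance reaches ACTIVE, giving up after 250 seconds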
while nova.servers.get(server.id).status != 'ACTIVE':
if time.time() - start_time > 250:
raise Exception("Instance did not reach ACTIVE state within 250 seconds")
time.sleep(1)
|
Create script to boot an instance in the cloudimport argparse
import time
import novaclient.v1_1.client as nvclient
parser = argparse.ArgumentParser(description="Script for vm create")
parser.add_argument("-openstack_user", dest='openstack_user', type=str,
help="Openstack username", default='admin')
parser.add_argument("-openstack_password", dest='openstack_password',
type=str, help="Openstack password",
default='111')
parser.add_argument("-openstack_tenant", dest='openstack_tenant', type=str,
help="Openstack tenant", default='admin')
parser.add_argument("-keystone_url", dest='keystone_url', type=str,
help="Keystone url", default='http://localhost:5000/v2.0/')
parser.add_argument("-server_name", dest='server_name', type=str,
help="Server name", default='cimachine')
parser.add_argument("-image_id", dest='image_id', type=str,
help="Image id", default='id')
parser.add_argument("-flavor_id", dest='flavor_id', type=str,
help="Flavor id", default='id')
parser.add_argument("-net_id", dest='net_id', type=str,
help="Network id", default='id')
parser.add_argument("-keypair", dest='keypair', type=str,
help="Keypair name", default='huj')
args = parser.parse_args()
user = args.openstack_user
password = args.openstack_password
tenant = args.openstack_tenant
keystone_url = args.keystone_url
server_name = args.server_name
image_id = args.image_id
flavor_id = args.flavor_id
net_id = args.net_id
keypair = args.keypair
nova = nvclient.Client(user, password, tenant, keystone_url,
service_type="compute")
server = nova.servers.create(server_name, image_id, flavor_id,
key_name=keypair, nics=[{'net-id': net_id}])
start_time = time.time()
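# Poll until the instance reaches ACTIVE, giving up after 250 seconds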
while nova.servers.get(server.id).status != 'ACTIVE':
if time.time() - start_time > 250:
raise Exception("Instance did not reach ACTIVE state within 250 seconds")
time.sleep(1)
|
<commit_before><commit_msg>Create script to boot an instance in the cloud<commit_after>import argparse
import time
import novaclient.v1_1.client as nvclient
parser = argparse.ArgumentParser(description="Script for vm create")
parser.add_argument("-openstack_user", dest='openstack_user', type=str,
help="Openstack username", default='admin')
parser.add_argument("-openstack_password", dest='openstack_password',
type=str, help="Openstack password",
default='111')
parser.add_argument("-openstack_tenant", dest='openstack_tenant', type=str,
help="Openstack tenant", default='admin')
parser.add_argument("-keystone_url", dest='keystone_url', type=str,
help="Keystone url", default='http://localhost:5000/v2.0/')
parser.add_argument("-server_name", dest='server_name', type=str,
help="Server name", default='cimachine')
parser.add_argument("-image_id", dest='image_id', type=str,
help="Image id", default='id')
parser.add_argument("-flavor_id", dest='flavor_id', type=str,
help="Flavor id", default='id')
parser.add_argument("-net_id", dest='net_id', type=str,
help="Network id", default='id')
parser.add_argument("-keypair", dest='keypair', type=str,
help="Keypair name", default='huj')
args = parser.parse_args()
user = args.openstack_user
password = args.openstack_password
tenant = args.openstack_tenant
keystone_url = args.keystone_url
server_name = args.server_name
image_id = args.image_id
flavor_id = args.flavor_id
net_id = args.net_id
keypair = args.keypair
nova = nvclient.Client(user, password, tenant, keystone_url,
service_type="compute")
server = nova.servers.create(server_name, image_id, flavor_id,
key_name=keypair, nics=[{'net-id': net_id}])
start_time = time.time()
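# Poll until the instance reaches ACTIVE, giving up after 250 seconds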
while nova.servers.get(server.id).status != 'ACTIVE':
if time.time() - start_time > 250:
raise Exception("Instance did not reach ACTIVE state within 250 seconds")
time.sleep(1)
|
|
1ad38ae2f932e7b8d4e51d2ef5f681840266d1d8
|
ensemble/volren/tests/test_volume_renderer.py
|
ensemble/volren/tests/test_volume_renderer.py
|
import unittest
import numpy as np
from traits_enaml.testing.enaml_test_assistant import EnamlTestAssistant
from ensemble.volren.volume_data import VolumeData
from ensemble.volren.volume_renderer import VolumeRenderer
class VolumeRendererTestCase(EnamlTestAssistant, unittest.TestCase):
def setUp(self):
EnamlTestAssistant.setUp(self)
enaml_source = """
from enaml.widgets.api import Container
from ensemble.volren.volume_render_view import VolumeRenderView
enamldef MainView(Container): view:
attr renderer
VolumeRenderView:
renderer << view.renderer
"""
volume = np.random.normal(size=(32, 32, 32))
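# Rescale the random data to the full 0-255 uint8 range expected by the renderer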
volume = (255*(volume-volume.min())/volume.ptp()).astype(np.uint8)
volume_data = VolumeData(data=volume)
self.renderer = VolumeRenderer(volume_data=volume_data)
self.view, _ = self.parse_and_create(enaml_source,
renderer=self.renderer)
def tearDown(self):
self.view = None
self.renderer = None
EnamlTestAssistant.tearDown(self)
def test_renderer_initialized(self):
self.assertTrue(self.renderer.volume is not None)
if __name__ == "__main__":
unittest.main()
|
Add a very basic unit test for the volume renderer widget.
|
Add a very basic unit test for the volume renderer widget.
|
Python
|
bsd-3-clause
|
dmsurti/ensemble
|
Add a very basic unit test for the volume renderer widget.
|
import unittest
import numpy as np
from traits_enaml.testing.enaml_test_assistant import EnamlTestAssistant
from ensemble.volren.volume_data import VolumeData
from ensemble.volren.volume_renderer import VolumeRenderer
class VolumeRendererTestCase(EnamlTestAssistant, unittest.TestCase):
def setUp(self):
EnamlTestAssistant.setUp(self)
enaml_source = """
from enaml.widgets.api import Container
from ensemble.volren.volume_render_view import VolumeRenderView
enamldef MainView(Container): view:
attr renderer
VolumeRenderView:
renderer << view.renderer
"""
volume = np.random.normal(size=(32, 32, 32))
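# Rescale the random data to the full 0-255 uint8 range expected by the renderer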
volume = (255*(volume-volume.min())/volume.ptp()).astype(np.uint8)
volume_data = VolumeData(data=volume)
self.renderer = VolumeRenderer(volume_data=volume_data)
self.view, _ = self.parse_and_create(enaml_source,
renderer=self.renderer)
def tearDown(self):
self.view = None
self.renderer = None
EnamlTestAssistant.tearDown(self)
def test_renderer_initialized(self):
self.assertTrue(self.renderer.volume is not None)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add a very basic unit test for the volume renderer widget.<commit_after>
|
import unittest
import numpy as np
from traits_enaml.testing.enaml_test_assistant import EnamlTestAssistant
from ensemble.volren.volume_data import VolumeData
from ensemble.volren.volume_renderer import VolumeRenderer
class VolumeRendererTestCase(EnamlTestAssistant, unittest.TestCase):
def setUp(self):
EnamlTestAssistant.setUp(self)
enaml_source = """
from enaml.widgets.api import Container
from ensemble.volren.volume_render_view import VolumeRenderView
enamldef MainView(Container): view:
attr renderer
VolumeRenderView:
renderer << view.renderer
"""
volume = np.random.normal(size=(32, 32, 32))
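# Rescale the random data to the full 0-255 uint8 range expected by the renderer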
volume = (255*(volume-volume.min())/volume.ptp()).astype(np.uint8)
volume_data = VolumeData(data=volume)
self.renderer = VolumeRenderer(volume_data=volume_data)
self.view, _ = self.parse_and_create(enaml_source,
renderer=self.renderer)
def tearDown(self):
self.view = None
self.renderer = None
EnamlTestAssistant.tearDown(self)
def test_renderer_initialized(self):
self.assertTrue(self.renderer.volume is not None)
if __name__ == "__main__":
unittest.main()
|
Add a very basic unit test for the volume renderer widget.import unittest
import numpy as np
from traits_enaml.testing.enaml_test_assistant import EnamlTestAssistant
from ensemble.volren.volume_data import VolumeData
from ensemble.volren.volume_renderer import VolumeRenderer
class VolumeRendererTestCase(EnamlTestAssistant, unittest.TestCase):
def setUp(self):
EnamlTestAssistant.setUp(self)
enaml_source = """
from enaml.widgets.api import Container
from ensemble.volren.volume_render_view import VolumeRenderView
enamldef MainView(Container): view:
attr renderer
VolumeRenderView:
renderer << view.renderer
"""
volume = np.random.normal(size=(32, 32, 32))
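# Rescale the random data to the full 0-255 uint8 range expected by the renderer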
volume = (255*(volume-volume.min())/volume.ptp()).astype(np.uint8)
volume_data = VolumeData(data=volume)
self.renderer = VolumeRenderer(volume_data=volume_data)
self.view, _ = self.parse_and_create(enaml_source,
renderer=self.renderer)
def tearDown(self):
self.view = None
self.renderer = None
EnamlTestAssistant.tearDown(self)
def test_renderer_initialized(self):
self.assertTrue(self.renderer.volume is not None)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add a very basic unit test for the volume renderer widget.<commit_after>import unittest
import numpy as np
from traits_enaml.testing.enaml_test_assistant import EnamlTestAssistant
from ensemble.volren.volume_data import VolumeData
from ensemble.volren.volume_renderer import VolumeRenderer
class VolumeRendererTestCase(EnamlTestAssistant, unittest.TestCase):
def setUp(self):
EnamlTestAssistant.setUp(self)
enaml_source = """
from enaml.widgets.api import Container
from ensemble.volren.volume_render_view import VolumeRenderView
enamldef MainView(Container): view:
attr renderer
VolumeRenderView:
renderer << view.renderer
"""
volume = np.random.normal(size=(32, 32, 32))
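# Rescale the random data to the full 0-255 uint8 range expected by the renderer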
volume = (255*(volume-volume.min())/volume.ptp()).astype(np.uint8)
volume_data = VolumeData(data=volume)
self.renderer = VolumeRenderer(volume_data=volume_data)
self.view, _ = self.parse_and_create(enaml_source,
renderer=self.renderer)
def tearDown(self):
self.view = None
self.renderer = None
EnamlTestAssistant.tearDown(self)
def test_renderer_initialized(self):
self.assertTrue(self.renderer.volume is not None)
if __name__ == "__main__":
unittest.main()
|
|
6775c82c1ad9c60a034a521c8b8807ba7170d95d
|
games/management/commands/mame_ico_to_icon.py
|
games/management/commands/mame_ico_to_icon.py
|
"""Generate banners from ProgettoSnap marquees"""
import os
from PIL import Image
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from django.conf import settings
from common.util import crop_banner
from games.models import Game
if settings.DEBUG:
MAME_ICO_PATH = "/media/strider/Backup/Games/Arcade/icons"
else:
MAME_ICO_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons")
ICON_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons-png")
class Command(BaseCommand):
"""Resize banners and icons"""
@staticmethod
def make_icon_from_icon(game):
"""Generate a banner for a game from available marquees"""
mame_ids = [pgame.slug for pgame in game.provider_games.all()]
for mame_id in mame_ids:
mame_ico_path = os.path.join(MAME_ICO_PATH, "%s.ico" % mame_id)
if not os.path.exists(mame_ico_path):
continue
icon_filename = "%s.png" % mame_id
icon_path = os.path.join(ICON_PATH, icon_filename)
try:
ico_file = Image.open(mame_ico_path)
except ValueError:
print("Failed to read %s" % mame_ico_path)
continue
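# Re-encode the decoded .ico image as PNG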
ico_file.save(icon_path, "PNG")
with open(icon_path, "rb") as banner_file:
icon_content = banner_file.read()
game.icon = ContentFile(icon_content, icon_filename)
game.save()
print("Icon created for %s" % game)
return
def handle(self, *args, **_kwargs):
"""Run command"""
if not os.path.exists(ICON_PATH):
os.makedirs(ICON_PATH)
for game in Game.objects.filter(provider_games__provider__name="MAME"):
if game.icon:
continue
self.make_icon_from_icon(game)
|
Add task to create Lutris icons from MAME icons
|
Add task to create Lutris icons from MAME icons
|
Python
|
agpl-3.0
|
lutris/website,lutris/website,lutris/website,lutris/website
|
Add task to create Lutris icons from MAME icons
|
"""Generate banners from ProgettoSnap marquees"""
import os
from PIL import Image
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from django.conf import settings
from common.util import crop_banner
from games.models import Game
if settings.DEBUG:
MAME_ICO_PATH = "/media/strider/Backup/Games/Arcade/icons"
else:
MAME_ICO_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons")
ICON_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons-png")
class Command(BaseCommand):
"""Resize banners and icons"""
@staticmethod
def make_icon_from_icon(game):
"""Generate a banner for a game from available marquees"""
mame_ids = [pgame.slug for pgame in game.provider_games.all()]
for mame_id in mame_ids:
mame_ico_path = os.path.join(MAME_ICO_PATH, "%s.ico" % mame_id)
if not os.path.exists(mame_ico_path):
continue
icon_filename = "%s.png" % mame_id
icon_path = os.path.join(ICON_PATH, icon_filename)
try:
ico_file = Image.open(mame_ico_path)
except ValueError:
print("Failed to read %s" % mame_ico_path)
continue
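# Re-encode the decoded .ico image as PNG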
ico_file.save(icon_path, "PNG")
with open(icon_path, "rb") as banner_file:
icon_content = banner_file.read()
game.icon = ContentFile(icon_content, icon_filename)
game.save()
print("Icon created for %s" % game)
return
def handle(self, *args, **_kwargs):
"""Run command"""
if not os.path.exists(ICON_PATH):
os.makedirs(ICON_PATH)
for game in Game.objects.filter(provider_games__provider__name="MAME"):
if game.icon:
continue
self.make_icon_from_icon(game)
|
<commit_before><commit_msg>Add task to create Lutris icons from MAME icons<commit_after>
|
"""Generate banners from ProgettoSnap marquees"""
import os
from PIL import Image
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from django.conf import settings
from common.util import crop_banner
from games.models import Game
if settings.DEBUG:
MAME_ICO_PATH = "/media/strider/Backup/Games/Arcade/icons"
else:
MAME_ICO_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons")
ICON_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons-png")
class Command(BaseCommand):
"""Resize banners and icons"""
@staticmethod
def make_icon_from_icon(game):
"""Generate a banner for a game from available marquees"""
mame_ids = [pgame.slug for pgame in game.provider_games.all()]
for mame_id in mame_ids:
mame_ico_path = os.path.join(MAME_ICO_PATH, "%s.ico" % mame_id)
if not os.path.exists(mame_ico_path):
continue
icon_filename = "%s.png" % mame_id
icon_path = os.path.join(ICON_PATH, icon_filename)
try:
ico_file = Image.open(mame_ico_path)
except ValueError:
print("Failed to read %s" % mame_ico_path)
continue
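# Re-encode the decoded .ico image as PNG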
ico_file.save(icon_path, "PNG")
with open(icon_path, "rb") as banner_file:
icon_content = banner_file.read()
game.icon = ContentFile(icon_content, icon_filename)
game.save()
print("Icon created for %s" % game)
return
def handle(self, *args, **_kwargs):
"""Run command"""
if not os.path.exists(ICON_PATH):
os.makedirs(ICON_PATH)
for game in Game.objects.filter(provider_games__provider__name="MAME"):
if game.icon:
continue
self.make_icon_from_icon(game)
|
Add task to create Lutris icons from MAME icons"""Generate Lutris icons from MAME .ico files"""
import os
from PIL import Image
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from django.conf import settings
from common.util import crop_banner
from games.models import Game
if settings.DEBUG:
MAME_ICO_PATH = "/media/strider/Backup/Games/Arcade/icons"
else:
MAME_ICO_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons")
ICON_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons-png")
class Command(BaseCommand):
"""Resize banners and icons"""
@staticmethod
def make_icon_from_icon(game):
"""Generate a banner for a game from available marquees"""
mame_ids = [pgame.slug for pgame in game.provider_games.all()]
for mame_id in mame_ids:
mame_ico_path = os.path.join(MAME_ICO_PATH, "%s.ico" % mame_id)
if not os.path.exists(mame_ico_path):
continue
icon_filename = "%s.png" % mame_id
icon_path = os.path.join(ICON_PATH, icon_filename)
try:
ico_file = Image.open(mame_ico_path)
except ValueError:
print("Failed to read %s" % mame_ico_path)
continue
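# Re-encode the decoded .ico image as PNG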
ico_file.save(icon_path, "PNG")
with open(icon_path, "rb") as banner_file:
icon_content = banner_file.read()
game.icon = ContentFile(icon_content, icon_filename)
game.save()
print("Icon created for %s" % game)
return
def handle(self, *args, **_kwargs):
"""Run command"""
if not os.path.exists(ICON_PATH):
os.makedirs(ICON_PATH)
for game in Game.objects.filter(provider_games__provider__name="MAME"):
if game.icon:
continue
self.make_icon_from_icon(game)
|
<commit_before><commit_msg>Add task to create Lutris icons from MAME icons<commit_after>"""Generate Lutris icons from MAME .ico files"""
import os
from PIL import Image
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from django.conf import settings
from common.util import crop_banner
from games.models import Game
if settings.DEBUG:
MAME_ICO_PATH = "/media/strider/Backup/Games/Arcade/icons"
else:
MAME_ICO_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons")
ICON_PATH = os.path.join(settings.MEDIA_ROOT, "mame/icons-png")
class Command(BaseCommand):
"""Resize banners and icons"""
@staticmethod
def make_icon_from_icon(game):
"""Generate a banner for a game from available marquees"""
mame_ids = [pgame.slug for pgame in game.provider_games.all()]
for mame_id in mame_ids:
mame_ico_path = os.path.join(MAME_ICO_PATH, "%s.ico" % mame_id)
if not os.path.exists(mame_ico_path):
continue
icon_filename = "%s.png" % mame_id
icon_path = os.path.join(ICON_PATH, icon_filename)
try:
ico_file = Image.open(mame_ico_path)
except ValueError:
print("Failed to read %s" % mame_ico_path)
continue
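# Re-encode the decoded .ico image as PNG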
ico_file.save(icon_path, "PNG")
with open(icon_path, "rb") as banner_file:
icon_content = banner_file.read()
game.icon = ContentFile(icon_content, icon_filename)
game.save()
print("Icon created for %s" % game)
return
def handle(self, *args, **_kwargs):
"""Run command"""
if not os.path.exists(ICON_PATH):
os.makedirs(ICON_PATH)
for game in Game.objects.filter(provider_games__provider__name="MAME"):
if game.icon:
continue
self.make_icon_from_icon(game)
|
|
bdada0e40b481f8d9c81032c8dff475817a1a55e
|
tests/test_authentication.py
|
tests/test_authentication.py
|
"""tests/test_authentication.py.
Tests hug's built-in authentication helper methods
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
from base64 import b64encode
import falcon
import pytest
import hug
api = sys.modules[__name__]
def test_basic_auth():
'''Test to ensure hug's provided basic_auth handler works as expected'''
@hug.get(only_if=hug.authentication.basic(hug.authentication.verify('Tim', 'Custom password')))
def hello_world():
return 'Hello world!'
assert '401' in hug.test.get(api, 'hello_world').status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Not correctly formed'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Nospaces'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic VXNlcjE6bXlwYXNzd29yZA'}).status
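# A well-formed header is "Basic " + base64("username:password")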
token = b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8')).decode('utf8')
assert hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic {0}'.format(token)}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8'))
assert hug.test.get(api, 'hello_world', headers={'Authorization': token}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Wrong password').encode('utf8'))
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': token}).status
|
Add test to define how basic authentication should work
|
Add test to define how basic authentication should work
|
Python
|
mit
|
giserh/hug,MuhammadAlkarouri/hug,timothycrosley/hug,MuhammadAlkarouri/hug,timothycrosley/hug,giserh/hug,timothycrosley/hug,MuhammadAlkarouri/hug
|
Add test to define how basic authentication should work
|
"""tests/test_authentication.py.
Tests hug's built-in authentication helper methods
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
from base64 import b64encode
import falcon
import pytest
import hug
api = sys.modules[__name__]
def test_basic_auth():
'''Test to ensure hug's provided basic_auth handler works as expected'''
@hug.get(only_if=hug.authentication.basic(hug.authentication.verify('Tim', 'Custom password')))
def hello_world():
return 'Hello world!'
assert '401' in hug.test.get(api, 'hello_world').status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Not correctly formed'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Nospaces'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic VXNlcjE6bXlwYXNzd29yZA'}).status
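# A well-formed header is "Basic " + base64("username:password")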
token = b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8')).decode('utf8')
assert hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic {0}'.format(token)}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8'))
assert hug.test.get(api, 'hello_world', headers={'Authorization': token}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Wrong password').encode('utf8'))
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': token}).status
|
<commit_before><commit_msg>Add test to define how basic authentication should work<commit_after>
|
"""tests/test_authentication.py.
Tests hug's built-in authentication helper methods
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
from base64 import b64encode
import falcon
import pytest
import hug
api = sys.modules[__name__]
def test_basic_auth():
'''Test to ensure hug's provided basic_auth handler works as expected'''
@hug.get(only_if=hug.authentication.basic(hug.authentication.verify('Tim', 'Custom password')))
def hello_world():
return 'Hello world!'
assert '401' in hug.test.get(api, 'hello_world').status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Not correctly formed'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Nospaces'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic VXNlcjE6bXlwYXNzd29yZA'}).status
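# A well-formed header is "Basic " + base64("username:password")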
token = b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8')).decode('utf8')
assert hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic {0}'.format(token)}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8'))
assert hug.test.get(api, 'hello_world', headers={'Authorization': token}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Wrong password').encode('utf8'))
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': token}).status
|
Add test to define how basic authentication should work"""tests/test_authentication.py.
Tests hug's built-in authentication helper methods
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
from base64 import b64encode
import falcon
import pytest
import hug
api = sys.modules[__name__]
def test_basic_auth():
'''Test to ensure hug's provided basic_auth handler works as expected'''
@hug.get(only_if=hug.authentication.basic(hug.authentication.verify('Tim', 'Custom password')))
def hello_world():
return 'Hello world!'
assert '401' in hug.test.get(api, 'hello_world').status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Not correctly formed'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Nospaces'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic VXNlcjE6bXlwYXNzd29yZA'}).status
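# A well-formed header is "Basic " + base64("username:password")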
token = b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8')).decode('utf8')
assert hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic {0}'.format(token)}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8'))
assert hug.test.get(api, 'hello_world', headers={'Authorization': token}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Wrong password').encode('utf8'))
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': token}).status
|
<commit_before><commit_msg>Add test to define how basic authentication should work<commit_after>"""tests/test_authentication.py.
Tests hug's built-in authentication helper methods
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
from base64 import b64encode
import falcon
import pytest
import hug
api = sys.modules[__name__]
def test_basic_auth():
'''Test to ensure hug's provided basic_auth handler works as expected'''
@hug.get(only_if=hug.authentication.basic(hug.authentication.verify('Tim', 'Custom password')))
def hello_world():
return 'Hello world!'
assert '401' in hug.test.get(api, 'hello_world').status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Not correctly formed'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Nospaces'}).status
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic VXNlcjE6bXlwYXNzd29yZA'}).status
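# A well-formed header is "Basic " + base64("username:password")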
token = b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8')).decode('utf8')
assert hug.test.get(api, 'hello_world', headers={'Authorization': 'Basic {0}'.format(token)}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Custom password').encode('utf8'))
assert hug.test.get(api, 'hello_world', headers={'Authorization': token}).data == 'Hello world!'
token = b'Basic ' + b64encode('{0}:{1}'.format('Tim', 'Wrong password').encode('utf8'))
assert '401' in hug.test.get(api, 'hello_world', headers={'Authorization': token}).status
|
|
74a413cb59eb59ce7354ca90d6b49b26960ad235
|
web/main/migrations/0022_post_32_upgrade.py
|
web/main/migrations/0022_post_32_upgrade.py
|
# Generated by Django 3.2.14 on 2022-07-05 17:19
import django.core.validators
from django.db import migrations, models
import main.models
import re
class Migration(migrations.Migration):
dependencies = [
('main', '0021_auto_20220527_1714'),
]
operations = [
migrations.AlterField(
model_name='commontitle',
name='public_url',
field=models.CharField(max_length=300, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid')]),
),
migrations.AlterField(
model_name='historicallegaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='legaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='user',
name='public_url',
field=models.CharField(blank=True, max_length=255, null=True, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid'), main.models.validate_unused_prefix]),
),
]
|
Add migrations that were pending since the 3.2 upgrade
|
Add migrations that were pending since the 3.2 upgrade
|
Python
|
agpl-3.0
|
harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o
|
Add migrations that were pending since the 3.2 upgrade
|
# Generated by Django 3.2.14 on 2022-07-05 17:19
import django.core.validators
from django.db import migrations, models
import main.models
import re
class Migration(migrations.Migration):
dependencies = [
('main', '0021_auto_20220527_1714'),
]
operations = [
migrations.AlterField(
model_name='commontitle',
name='public_url',
field=models.CharField(max_length=300, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid')]),
),
migrations.AlterField(
model_name='historicallegaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='legaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='user',
name='public_url',
field=models.CharField(blank=True, max_length=255, null=True, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid'), main.models.validate_unused_prefix]),
),
]
|
<commit_before><commit_msg>Add migrations that were pending since the 3.2 upgrade<commit_after>
|
# Generated by Django 3.2.14 on 2022-07-05 17:19
import django.core.validators
from django.db import migrations, models
import main.models
import re
class Migration(migrations.Migration):
dependencies = [
('main', '0021_auto_20220527_1714'),
]
operations = [
migrations.AlterField(
model_name='commontitle',
name='public_url',
field=models.CharField(max_length=300, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid')]),
),
migrations.AlterField(
model_name='historicallegaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='legaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='user',
name='public_url',
field=models.CharField(blank=True, max_length=255, null=True, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid'), main.models.validate_unused_prefix]),
),
]
|
Add migrations that were pending since the 3.2 upgrade# Generated by Django 3.2.14 on 2022-07-05 17:19
import django.core.validators
from django.db import migrations, models
import main.models
import re
class Migration(migrations.Migration):
dependencies = [
('main', '0021_auto_20220527_1714'),
]
operations = [
migrations.AlterField(
model_name='commontitle',
name='public_url',
field=models.CharField(max_length=300, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid')]),
),
migrations.AlterField(
model_name='historicallegaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='legaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='user',
name='public_url',
field=models.CharField(blank=True, max_length=255, null=True, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid'), main.models.validate_unused_prefix]),
),
]
|
<commit_before><commit_msg>Add migrations that were pending since the 3.2 upgrade<commit_after># Generated by Django 3.2.14 on 2022-07-05 17:19
import django.core.validators
from django.db import migrations, models
import main.models
import re
class Migration(migrations.Migration):
dependencies = [
('main', '0021_auto_20220527_1714'),
]
operations = [
migrations.AlterField(
model_name='commontitle',
name='public_url',
field=models.CharField(max_length=300, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid')]),
),
migrations.AlterField(
model_name='historicallegaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='legaldocument',
name='metadata',
field=models.JSONField(blank=True, null=True),
),
migrations.AlterField(
model_name='user',
name='public_url',
field=models.CharField(blank=True, max_length=255, null=True, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-\\w]+\\Z'), 'Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.', 'invalid'), main.models.validate_unused_prefix]),
),
]
|
|
7d98b3ab9ba55478e69d27d77ff962ff7eab73c1
|
nbgrader/tests/test_api.py
|
nbgrader/tests/test_api.py
|
from nbgrader import api
class TestApi(object):
def test_create_assignment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
assert a.assignment_id == 'foo'
assert a.duedate == 'someday'
assert a._id
def test_create_student(self):
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
assert s.student_id == 12345
assert s.first_name == 'Jane'
assert s.last_name == 'Doe'
assert s.email == 'janedoe@nowhere'
assert s._id
def test_create_notebook(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
assert n.notebook_id == 'blah'
assert n.assignment == a
assert n.student == s
assert n._id
assert n.to_dict()['assignment'] == a._id
assert n.to_dict()['student'] == s._id
def test_create_grade(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
g = api.Grade(grade_id='foo', max_score=10, autoscore=1, score=5, notebook=n)
assert g.grade_id == 'foo'
assert g.max_score == 10
assert g.autoscore == 1
assert g.score == 5
assert g.notebook == n
assert g._id
assert g.to_dict()['notebook'] == n._id
def test_create_comment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
c = api.Comment(comment_id='foo', comment='lorem ipsum', notebook=n)
assert c.comment_id == 'foo'
assert c.comment == 'lorem ipsum'
assert c.notebook == n
assert c._id
assert c.to_dict()['notebook'] == n._id
|
Add some minimal api tests
|
Add some minimal api tests
|
Python
|
bsd-3-clause
|
modulexcite/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,alope107/nbgrader,MatKallada/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,alope107/nbgrader,EdwardJKim/nbgrader,dementrock/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,jdfreder/nbgrader
|
Add some minimal api tests
|
from nbgrader import api
class TestApi(object):
def test_create_assignment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
assert a.assignment_id == 'foo'
assert a.duedate == 'someday'
assert a._id
def test_create_student(self):
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
assert s.student_id == 12345
assert s.first_name == 'Jane'
assert s.last_name == 'Doe'
assert s.email == 'janedoe@nowhere'
assert s._id
def test_create_notebook(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
assert n.notebook_id == 'blah'
assert n.assignment == a
assert n.student == s
assert n._id
assert n.to_dict()['assignment'] == a._id
assert n.to_dict()['student'] == s._id
def test_create_grade(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
g = api.Grade(grade_id='foo', max_score=10, autoscore=1, score=5, notebook=n)
assert g.grade_id == 'foo'
assert g.max_score == 10
assert g.autoscore == 1
assert g.score == 5
assert g.notebook == n
assert g._id
assert g.to_dict()['notebook'] == n._id
def test_create_comment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
c = api.Comment(comment_id='foo', comment='lorem ipsum', notebook=n)
assert c.comment_id == 'foo'
assert c.comment == 'lorem ipsum'
assert c.notebook == n
assert c._id
assert c.to_dict()['notebook'] == n._id
|
<commit_before><commit_msg>Add some minimal api tests<commit_after>
|
from nbgrader import api
class TestApi(object):
def test_create_assignment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
assert a.assignment_id == 'foo'
assert a.duedate == 'someday'
assert a._id
def test_create_student(self):
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
assert s.student_id == 12345
assert s.first_name == 'Jane'
assert s.last_name == 'Doe'
assert s.email == 'janedoe@nowhere'
assert s._id
def test_create_notebook(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
assert n.notebook_id == 'blah'
assert n.assignment == a
assert n.student == s
assert n._id
assert n.to_dict()['assignment'] == a._id
assert n.to_dict()['student'] == s._id
def test_create_grade(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
g = api.Grade(grade_id='foo', max_score=10, autoscore=1, score=5, notebook=n)
assert g.grade_id == 'foo'
assert g.max_score == 10
assert g.autoscore == 1
assert g.score == 5
assert g.notebook == n
assert g._id
assert g.to_dict()['notebook'] == n._id
def test_create_comment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
c = api.Comment(comment_id='foo', comment='lorem ipsum', notebook=n)
assert c.comment_id == 'foo'
assert c.comment == 'lorem ipsum'
assert c.notebook == n
assert c._id
assert c.to_dict()['notebook'] == n._id
|
Add some minimal api testsfrom nbgrader import api
class TestApi(object):
def test_create_assignment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
assert a.assignment_id == 'foo'
assert a.duedate == 'someday'
assert a._id
def test_create_student(self):
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
assert s.student_id == 12345
assert s.first_name == 'Jane'
assert s.last_name == 'Doe'
assert s.email == 'janedoe@nowhere'
assert s._id
def test_create_notebook(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
assert n.notebook_id == 'blah'
assert n.assignment == a
assert n.student == s
assert n._id
assert n.to_dict()['assignment'] == a._id
assert n.to_dict()['student'] == s._id
def test_create_grade(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
g = api.Grade(grade_id='foo', max_score=10, autoscore=1, score=5, notebook=n)
assert g.grade_id == 'foo'
assert g.max_score == 10
assert g.autoscore == 1
assert g.score == 5
assert g.notebook == n
assert g._id
assert g.to_dict()['notebook'] == n._id
def test_create_comment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
c = api.Comment(comment_id='foo', comment='lorem ipsum', notebook=n)
assert c.comment_id == 'foo'
assert c.comment == 'lorem ipsum'
assert c.notebook == n
assert c._id
assert c.to_dict()['notebook'] == n._id
|
<commit_before><commit_msg>Add some minimal api tests<commit_after>from nbgrader import api
class TestApi(object):
def test_create_assignment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
assert a.assignment_id == 'foo'
assert a.duedate == 'someday'
assert a._id
def test_create_student(self):
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
assert s.student_id == 12345
assert s.first_name == 'Jane'
assert s.last_name == 'Doe'
assert s.email == 'janedoe@nowhere'
assert s._id
def test_create_notebook(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
assert n.notebook_id == 'blah'
assert n.assignment == a
assert n.student == s
assert n._id
assert n.to_dict()['assignment'] == a._id
assert n.to_dict()['student'] == s._id
def test_create_grade(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
g = api.Grade(grade_id='foo', max_score=10, autoscore=1, score=5, notebook=n)
assert g.grade_id == 'foo'
assert g.max_score == 10
assert g.autoscore == 1
assert g.score == 5
assert g.notebook == n
assert g._id
assert g.to_dict()['notebook'] == n._id
def test_create_comment(self):
a = api.Assignment(assignment_id='foo', duedate='someday')
s = api.Student(student_id=12345, first_name='Jane', last_name='Doe', email='janedoe@nowhere')
n = api.Notebook(notebook_id='blah', assignment=a, student=s)
c = api.Comment(comment_id='foo', comment='lorem ipsum', notebook=n)
assert c.comment_id == 'foo'
assert c.comment == 'lorem ipsum'
assert c.notebook == n
assert c._id
assert c.to_dict()['notebook'] == n._id
|
|
b687a3eac694a311bf4e92b9662a0f13c3506154
|
tests/linters/test_lint_nwod.py
|
tests/linters/test_lint_nwod.py
|
import npc
import pytest
import os
from tests.util import fixture_dir
def test_has_virtue():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing virtue' in problems
def test_has_vice():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing vice' in problems
|
Add core nwod linting tests
|
Add core nwod linting tests
|
Python
|
mit
|
aurule/npc,aurule/npc
|
Add core nwod linting tests
|
import npc
import pytest
import os
from tests.util import fixture_dir
def test_has_virtue():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing virtue' in problems
def test_has_vice():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing vice' in problems
|
<commit_before><commit_msg>Add core nwod linting tests<commit_after>
|
import npc
import pytest
import os
from tests.util import fixture_dir
def test_has_virtue():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing virtue' in problems
def test_has_vice():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing vice' in problems
|
Add core nwod linting testsimport npc
import pytest
import os
from tests.util import fixture_dir
def test_has_virtue():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing virtue' in problems
def test_has_vice():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing vice' in problems
|
<commit_before><commit_msg>Add core nwod linting tests<commit_after>import npc
import pytest
import os
from tests.util import fixture_dir
def test_has_virtue():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing virtue' in problems
def test_has_vice():
char_file = fixture_dir('linter', 'nwod', 'Gotta Nada.nwod')
with open(char_file, 'r') as char_file:
problems = npc.linters.nwod.lint_vice_virtue(char_file.read())
assert 'Missing vice' in problems
|
|
20fecc93aca627a4aaf42c36df32d5f5a38f5b7a
|
tools/map_2d_dictionaries.py
|
tools/map_2d_dictionaries.py
|
import numpy as np
import os
import itertools
import matplotlib.pyplot as plt
from parameter_prediction.datasets import dictionary
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
def map_2d_dictionary(ax, dictionary):
# canonical order for dictionary elements is row major
atoms = [
np.reshape(dictionary.get_atom(i), dictionary.extent)
for i in xrange(dictionary.size)]
rows = grouper(atoms, int(dictionary.extent[0]))
rows = [np.concatenate(row, axis=1) for row in rows]
full = np.concatenate(rows, axis=0)
ax.pcolor(full, cmap=plt.cm.gray)
ax.set_xlim([0, full.shape[1]])
ax.set_ylim([0, full.shape[0]])
return ax
if __name__ == "__main__":
output_dir = "scratch"
extent = [8,8]
dictionaries = [
dictionary.DCTDictionary(extent),
dictionary.GaussianKernelDictionary(extent, 1.0),
]
for d in dictionaries:
fig = plt.figure()
map_2d_dictionary(fig.gca(), d)
fig.savefig(os.path.join(output_dir, d.__class__.__name__ + ".png"))
|
Add script for visualizing 2d dictionaries.
|
Add script for visualizing 2d dictionaries.
Useful for debugging.
|
Python
|
mit
|
mdenil/parameter_prediction,mdenil/parameter_prediction,mdenil/parameter_prediction
|
Add script for visualizing 2d dictionaries.
Useful for debugging.
|
import numpy as np
import os
import itertools
import matplotlib.pyplot as plt
from parameter_prediction.datasets import dictionary
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
def map_2d_dictionary(ax, dictionary):
# canonical order for dictionary elements is row major
atoms = [
np.reshape(dictionary.get_atom(i), dictionary.extent)
for i in xrange(dictionary.size)]
rows = grouper(atoms, int(dictionary.extent[0]))
rows = [np.concatenate(row, axis=1) for row in rows]
full = np.concatenate(rows, axis=0)
ax.pcolor(full, cmap=plt.cm.gray)
ax.set_xlim([0, full.shape[1]])
ax.set_ylim([0, full.shape[0]])
return ax
if __name__ == "__main__":
output_dir = "scratch"
extent = [8,8]
dictionaries = [
dictionary.DCTDictionary(extent),
dictionary.GaussianKernelDictionary(extent, 1.0),
]
for d in dictionaries:
fig = plt.figure()
map_2d_dictionary(fig.gca(), d)
fig.savefig(os.path.join(output_dir, d.__class__.__name__ + ".png"))
|
<commit_before><commit_msg>Add script for visualizing 2d dictionaries.
Useful for debugging.<commit_after>
|
import numpy as np
import os
import itertools
import matplotlib.pyplot as plt
from parameter_prediction.datasets import dictionary
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
def map_2d_dictionary(ax, dictionary):
# canonical order for dictionary elements is row major
atoms = [
np.reshape(dictionary.get_atom(i), dictionary.extent)
for i in xrange(dictionary.size)]
rows = grouper(atoms, int(dictionary.extent[0]))
rows = [np.concatenate(row, axis=1) for row in rows]
full = np.concatenate(rows, axis=0)
ax.pcolor(full, cmap=plt.cm.gray)
ax.set_xlim([0, full.shape[1]])
ax.set_ylim([0, full.shape[0]])
return ax
if __name__ == "__main__":
output_dir = "scratch"
extent = [8,8]
dictionaries = [
dictionary.DCTDictionary(extent),
dictionary.GaussianKernelDictionary(extent, 1.0),
]
for d in dictionaries:
fig = plt.figure()
map_2d_dictionary(fig.gca(), d)
fig.savefig(os.path.join(output_dir, d.__class__.__name__ + ".png"))
|
Add script for visualizing 2d dictionaries.
Useful for debugging.import numpy as np
import os
import itertools
import matplotlib.pyplot as plt
from parameter_prediction.datasets import dictionary
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
def map_2d_dictionary(ax, dictionary):
# canonical order for dictionary elements is row major
atoms = [
np.reshape(dictionary.get_atom(i), dictionary.extent)
for i in xrange(dictionary.size)]
rows = grouper(atoms, int(dictionary.extent[0]))
rows = [np.concatenate(row, axis=1) for row in rows]
full = np.concatenate(rows, axis=0)
ax.pcolor(full, cmap=plt.cm.gray)
ax.set_xlim([0, full.shape[1]])
ax.set_ylim([0, full.shape[0]])
return ax
if __name__ == "__main__":
output_dir = "scratch"
extent = [8,8]
dictionaries = [
dictionary.DCTDictionary(extent),
dictionary.GaussianKernelDictionary(extent, 1.0),
]
for d in dictionaries:
fig = plt.figure()
map_2d_dictionary(fig.gca(), d)
fig.savefig(os.path.join(output_dir, d.__class__.__name__ + ".png"))
|
<commit_before><commit_msg>Add script for visualizing 2d dictionaries.
Useful for debugging.<commit_after>import numpy as np
import os
import itertools
import matplotlib.pyplot as plt
from parameter_prediction.datasets import dictionary
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
def map_2d_dictionary(ax, dictionary):
    # canonical order for dictionary elements is row major
atoms = [
np.reshape(dictionary.get_atom(i), dictionary.extent)
for i in xrange(dictionary.size)]
rows = grouper(atoms, int(dictionary.extent[0]))
rows = [np.concatenate(row, axis=1) for row in rows]
full = np.concatenate(rows, axis=0)
ax.pcolor(full, cmap=plt.cm.gray)
    ax.set_xlim([0, full.shape[1]])
    ax.set_ylim([0, full.shape[0]])
return ax
if __name__ == "__main__":
output_dir = "scratch"
extent = [8,8]
dictionaries = [
dictionary.DCTDictionary(extent),
dictionary.GaussianKernelDictionary(extent, 1.0),
]
for d in dictionaries:
fig = plt.figure()
map_2d_dictionary(fig.gca(), d)
fig.savefig(os.path.join(output_dir, d.__class__.__name__ + ".png"))
|
|
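A side note on the grouper recipe used in the script above: it is the classic itertools chunking idiom, and the script as committed targets Python 2 (itertools.izip_longest, xrange). A minimal Python 3 sketch of the same chunking behaviour, with a made-up list standing in for the dictionary atoms:
# Python 3 port of the grouper idiom above; "atoms" here is a stand-in list, not real data.
import itertools
def grouper(iterable, n, fillvalue=None):
    # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
    args = [iter(iterable)] * n
    return itertools.zip_longest(*args, fillvalue=fillvalue)
atoms = list(range(10))
for chunk in grouper(atoms, 4, fillvalue=0):
    print(chunk)  # (0, 1, 2, 3), then (4, 5, 6, 7), then (8, 9, 0, 0)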
2a0958455799601068db054c130fa9573e7c1e22
|
tensorflow/python/ops/parallel_for/__init__.py
|
tensorflow/python/ops/parallel_for/__init__.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
'pfor',
'for_loop',
'jacobian',
'batch_jacobian',
]
remove_undocumented(__name__, _allowed_symbols)
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
|
Remove usage of remove_undocumented from core parallel_for.
|
Remove usage of remove_undocumented from core parallel_for.
remove_undocumented is causing issues with our pip tests.
remove_undocumented is not used anywhere else in core TF code
and we have a new mechanism for annotating the public TF API.
|
Python
|
apache-2.0
|
chemelnucfin/tensorflow,ppwwyyxx/tensorflow,annarev/tensorflow,jhseu/tensorflow,alshedivat/tensorflow,kobejean/tensorflow,Bismarrck/tensorflow,chemelnucfin/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,aselle/tensorflow,kobejean/tensorflow,Bismarrck/tensorflow,girving/tensorflow,petewarden/tensorflow,hfp/tensorflow-xsmm,tensorflow/tensorflow-pywrap_tf_optimizer,dancingdan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,seanli9jan/tensorflow,seanli9jan/tensorflow,dongjoon-hyun/tensorflow,Bismarrck/tensorflow,davidzchen/tensorflow,seanli9jan/tensorflow,freedomtan/tensorflow,jart/tensorflow,AnishShah/tensorflow,aselle/tensorflow,theflofly/tensorflow,hehongliang/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,sarvex/tensorflow,gunan/tensorflow,sarvex/tensorflow,annarev/tensorflow,Intel-tensorflow/tensorflow,arborh/tensorflow,dongjoon-hyun/tensorflow,girving/tensorflow,hfp/tensorflow-xsmm,yongtang/tensorflow,manipopopo/tensorflow,jart/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jalexvig/tensorflow,ageron/tensorflow,aam-at/tensorflow,jendap/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,brchiu/tensorflow,annarev/tensorflow,chemelnucfin/tensorflow,frreiss/tensorflow-fred,sarvex/tensorflow,alsrgv/tensorflow,annarev/tensorflow,jbedorf/tensorflow,gunan/tensorflow,cxxgtxy/tensorflow,arborh/tensorflow,annarev/tensorflow,ageron/tensorflow,snnn/tensorflow,gunan/tensorflow,arborh/tensorflow,DavidNorman/tensorflow,xzturn/tensorflow,jart/tensorflow,manipopopo/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ppwwyyxx/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,alsrgv/tensorflow,davidzchen/tensorflow,dongjoon-hyun/tensorflow,sarvex/tensorflow,davidzchen/tensorflow,xzturn/tensorflow,dancingdan/tensorflow,alsrgv/tensorflow,apark263/tensorflow,gautam1858/tensorflow,asimshankar/tensorflow,kobejean/tensorflow,AnishShah/tensorflow,ZhangXinNan/tensorflow,freedomtan/tensorflow,aam-at/tensorflow,seanli9jan/tensorflow,kobejean/tensorflow,girving/tensorflow,DavidNorman/tensorflow,dancingdan/tensorflow,DavidNorman/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,snnn/tensorflow,manipopopo/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_saved_model,kevin-coder/tensorflow-fork,asimshankar/tensorflow,aselle/tensorflow,aam-at/tensorflow,alsrgv/tensorflow,jalexvig/tensorflow,apark263/tensorflow,AnishShah/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,jhseu/tensorflow,jendap/tensorflow,alshedivat/tensorflow,xzturn/tensorflow,aselle/tensorflow,sarvex/tensorflow,ageron/tensorflow,jalexvig/tensorflow,gautam1858/tensorflow,aselle/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jendap/tensorflow,jhseu/tensorflow,girving/tensorflow,brchiu/tensorflow,davidzchen/tensorflow,DavidNorman/tensorflow,yongtang/tensorflow,aldian/tensorflow,jbedorf/tensorflow,aselle/tensorflow,brchiu/tensorflow,jendap/tensorflow,jhseu/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jendap/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,theflofly/tensorflow,snnn/tensorflow,renyi533/tensorflow,gunan/tensorflow,xodus7/tensorflow,chemelnucfin/tensorflow,freedomtan/tensorflow,dancingdan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,hfp/tensorflow-xsmm,theflofly/tensorflow,apark263/tensorflow,ppwwyyxx/tensorflow,ghchinoy/tensorflow,renyi533/tensorflow,Bismarrck/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,xodus7/tensorflow,arborh/tensorflow,tensorflow/tensorflow,aldian/tensorflow,kobejean/tensorflow,ghchinoy/tensorflow,apark263/tensorflow,manipopopo/tensorflow,alshedivat/tensorflow,tensorflow/tensorflow,jart/tensorflow,ghchinoy/tensorflow,manipopopo/tensorflow,arborh/tensorflow,DavidNorman/tensorflow,jendap/tensorflow,kobejean/tensorflow,arborh/tensorflow,chemelnucfin/tensorflow,jart/tensorflow,kevin-coder/tensorflow-fork,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,AnishShah/tensorflow,theflofly/tensorflow,hfp/tensorflow-xsmm,Bismarrck/tensorflow,girving/tensorflow,dancingdan/tensorflow,gautam1858/tensorflow,dongjoon-hyun/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,aselle/tensorflow,chemelnucfin/tensorflow,chemelnucfin/tensorflow,seanli9jan/tensorflow,alshedivat/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,ageron/tensorflow,theflofly/tensorflow,AnishShah/tensorflow,jart/tensorflow,adit-chandra/tensorflow,petewarden/tensorflow,jalexvig/tensorflow,arborh/tensorflow,alsrgv/tensorflow,aam-at/tensorflow,xzturn/tensorflow,DavidNorman/tensorflow,hehongliang/tensorflow,manipopopo/tensorflow,seanli9jan/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,girving/tensorflow,ZhangXinNan/tensorflow,yongtang/tensorflow,AnishShah/tensorflow,aam-at/tensorflow,theflofly/tensorflow,karllessard/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,ppwwyyxx/tensorflow,jhseu/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,arborh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow,xzturn/tensorflow,renyi533/tensorflow,karllessard/tensorflow,dancingdan/tensorflow,dongjoon-hyun/tensorflow,annarev/tensorflow,petewarden/tensorflow,snnn/tensorflow,freedomtan/tensorflow,apark263/tensorflow,hehongliang/tensorflow,ageron/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ppwwyyxx/tensorflow,gautam1858/tensorflow,dancingdan/tensorflow,jhseu/tensorflow,chemelnucfin/tensorflow,seanli9jan/tensorflow,manipopopo/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,adit-chandra/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,hehongliang/tensorflow,AnishShah/tensorflow,karllessard/tensorflow,aldian/tensorflow,renyi533/tensorflow,jart/tensorflow,kevin-coder/tensorflow-fork,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,girving/tensorflow,gunan/tensorflow,gautam1858/tensorflow,davidzchen/tensorflow,ghchinoy/tensorflow,aldian/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jendap/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,apark263/tensorflow,brchiu/tensorflow,dancingdan/tensorflow,girving/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,dancingdan/tensorflow,adit-chandra/tensorflow,kevin-coder/tensorflow-fork,asimshankar/tensorflow,xodus7/tensorflow,Intel-Corporation/tensorflow,snnn/tensorflow,alsrgv/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,kevin-coder/tensorflow-fork,Intel-Corporation/tensorflow,ZhangXinNan/tensorflow,paolodedios/tensorflow,gunan/tensorflow,kevin-coder/tensorflow-fork,alsrgv/tensorflow,tensorflow/tensorflow,jhseu/tensorflow,freedomtan/tensorflow,jhseu/tensorflow,brchiu/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,theflofly/tensorflow,kevin-coder/tensorflow-fork,paolodedios/tensorflow,ghchinoy/tensorflow,dongjoon-hyun/tensorflow,xzturn/tensorflow,snnn/tensorflow,jbedorf/tensorflow,DavidNorman/tensorflow,girving/tensorflow,alsrgv/tensorflow,renyi533/tensorflow,xodus7/tensorflow,tensorflow/tensorflow,aselle/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,ageron/tensorflow,freedomtan/tensorflow,alsrgv/tensorflow,annarev/tensorflow,aldian/tensorflow,ZhangXinNan/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,asimshankar/tensorflow,petewarden/tensorflow,jendap/tensorflow,aldian/tensorflow,karllessard/tensorflow,alsrgv/tensorflow,jart/tensorflow,hfp/tensorflow-xsmm,freedomtan/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,brchiu/tensorflow,xzturn/tensorflow,Intel-tensorflow/tensorflow,xodus7/tensorflow,Intel-tensorflow/tensorflow,jhseu/tensorflow,xodus7/tensorflow,karllessard/tensorflow,alshedivat/tensorflow,ageron/tensorflow,aselle/tensorflow,renyi533/tensorflow,yongtang/tensorflow,xodus7/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Bismarrck/tensorflow,ZhangXinNan/tensorflow,frreiss/tensorflow-fred,seanli9jan/tensorflow,kobejean/tensorflow,yongtang/tensorflow,alshedivat/tensorflow,asimshankar/tensorflow,theflofly/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,arborh/tensorflow,sarvex/tensorflow,arborh/tensorflow,ZhangXinNan/tensorflow,aam-at/tensorflow,petewarden/tensorflow,adit-chandra/tensorflow,sarvex/tensorflow,Bismarrck/tensorflow,asimshankar/tensorflow,seanli9jan/tensorflow,dongjoon-hyun/tensorflow,jalexvig/tensorflow,freedomtan/tensorflow,snnn/tensorflow,dancingdan/tensorflow,aam-at/tensorflow,hfp/tensorflow-xsmm,alshedivat/tensorflow,hfp/tensorflow-xsmm,Intel-Corporation/tensorflow,snnn/tensorflow,gunan/tensorflow,davidzchen/tensorflow,aldian/tensorflow,freedomtan/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,yongtang/tensorflow,karllessard/tensorflow,ageron/tensorflow,dongjoon-hyun/tensorflow,Intel-tensorflow/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,ZhangXinNan/tensorflow,jhseu/tensorflow,adit-chandra/tensorflow,arborh/tensorflow,apark263/tensorflow,karllessard/tensorflow,dancingdan/tensorflow,gautam1858/tensorflow,adit-chandra/tensorflow,aam-at/tensorflow,AnishShah/tensorflow,Intel-Corporation/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,xodus7/tensorflow,aam-at/tensorflow,davidzchen/tensorflow,hfp/tensorflow-xsmm,xzturn/tensorflow,manipopopo/tensorflow,AnishShah/tensorflow,alshedivat/tensorflow,karllessard/tensorflow,jalexvig/tensorflow,AnishShah/tensorflow,cxxgtxy/tensorflow,hfp/tensorflow-xsmm,tensorflow/tensorflow-pywrap_tf_optimizer,jendap/tensorflow,ZhangXinNan/tensorflow,chemelnucfin/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,snnn/tensorflow,yongtang/tensorflow,ageron/tensorflow,cxxgtxy/tensorflow,jart/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,renyi533/tensorflow,renyi533/tensorflow,freedomtan/tensorflow,aselle/tensorflow,arborh/tensorflow,brchiu/tensorflow,aam-at/tensorflow,cxxgtxy/tensorflow,apark263/tensorflow,hfp/tensorflow-xsmm,xzturn/tensorflow,seanli9jan/tensorflow,jbedorf/tensorflow,karllessard/tensorflow,xzturn/tensorflow,gunan/tensorflow,aselle/tensorflow,girving/tensorflow,adit-chandra/tensorflow,kobejean/tensorflow,snnn/tensorflow,jendap/tensorflow,xodus7/tensorflow,theflofly/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow,jalexvig/tensorflow,kobejean/tensorflow,alshedivat/tensorflow,ageron/tensorflow,xodus7/tensorflow,jbedorf/tensorflow,ghchinoy/tensorflow,aldian/tensorflow,gunan/tensorflow,brchiu/tensorflow,brchiu/tensorflow,snnn/tensorflow,DavidNorman/tensorflow,xzturn/tensorflow,kevin-coder/tensorflow-fork,alshedivat/tensorflow,ppwwyyxx/tensorflow,annarev/tensorflow,chemelnucfin/tensorflow,jhseu/tensorflow,brchiu/tensorflow,jalexvig/tensorflow,jhseu/tensorflow,adit-chandra/tensorflow,ppwwyyxx/tensorflow,kobejean/tensorflow,jart/tensorflow,dongjoon-hyun/tensorflow,Intel-tensorflow/tensorflow,gunan/tensorflow,ghchinoy/tensorflow,ppwwyyxx/tensorflow,hehongliang/tensorflow,hfp/tensorflow-xsmm,apark263/tensorflow,tensorflow/tensorflow,theflofly/tensorflow,xzturn/tensorflow,Intel-Corporation/tensorflow,jalexvig/tensorflow,manipopopo/tensorflow,renyi533/tensorflow,hehongliang/tensorflow,AnishShah/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,Bismarrck/tensorflow,jendap/tensorflow,yongtang/tensorflow,brchiu/tensorflow,paolodedios/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,cxxgtxy/tensorflow,jbedorf/tensorflow,karllessard/tensorflow,asimshankar/tensorflow,renyi533/tensorflow,annarev/tensorflow,ageron/tensorflow,apark263/tensorflow,kevin-coder/tensorflow-fork,jbedorf/tensorflow,DavidNorman/tensorflow,annarev/tensorflow,petewarden/tensorflow,dongjoon-hyun/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow,alshedivat/tensorflow
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
'pfor',
'for_loop',
'jacobian',
'batch_jacobian',
]
remove_undocumented(__name__, _allowed_symbols)
Remove usage of remove_undocumented from core parallel_for.
remove_undocumented is causing issues with our pip tests.
remove_undocumented is not used anywhere else in core TF code
and we have a new mechanism for annotating the public TF API.
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
|
<commit_before># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
'pfor',
'for_loop',
'jacobian',
'batch_jacobian',
]
remove_undocumented(__name__, _allowed_symbols)
<commit_msg>Remove usage of remove_undocumented from core parallel_for.
remove_undocumented is causing issues with our pip tests.
remove_undocumented is not used anywhere else in core TF code
and we have a new mechanism for annotating the public TF API.<commit_after>
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
'pfor',
'for_loop',
'jacobian',
'batch_jacobian',
]
remove_undocumented(__name__, _allowed_symbols)
Remove usage of remove_undocumented from core parallel_for.
remove_undocumented is causing issues with our pip tests.
remove_undocumented is not used anywhere else in core TF code
and we have a new mechanism for annotating the public TF API.# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
|
<commit_before># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
'pfor',
'for_loop',
'jacobian',
'batch_jacobian',
]
remove_undocumented(__name__, _allowed_symbols)
<commit_msg>Remove usage of remove_undocumented from core parallel_for.
remove_undocumented is causing issues with our pip tests.
remove_undocumented is not used anywhere else in core TF code
and we have a new mechanism for annotating the public TF API.<commit_after># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for pfor, for_loop, jacobian."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops.parallel_for import * # pylint: disable=wildcard-import
from tensorflow.python.ops.parallel_for.control_flow_ops import for_loop
from tensorflow.python.ops.parallel_for.control_flow_ops import pfor
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from tensorflow.python.ops.parallel_for.gradients import jacobian
|
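For context on what this __init__ exposes: pfor and for_loop vectorize a loop body, and jacobian/batch_jacobian are built on top of them. A rough TF 1.x graph-mode sketch of the jacobian helper; the toy tensors and session usage are illustrative assumptions, not taken from the commit:
# Illustrative TF 1.x sketch of the jacobian helper re-exported above.
import tensorflow as tf
from tensorflow.python.ops.parallel_for.gradients import jacobian
x = tf.constant([1.0, 2.0, 3.0])
y = x * x  # elementwise square
J = jacobian(y, x)  # expected: 3x3 matrix with 2*x on the diagonal
with tf.Session() as sess:
    print(sess.run(J))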
c7f6ac0dd56b755a25c215d02a540919e1cc92a3
|
gmmp/management/commands/map_weights_to_codes.py
|
gmmp/management/commands/map_weights_to_codes.py
|
import csv
from pprint import pprint
from django.core.management.base import BaseCommand
from django_countries import countries
from forms.modelutils import CountryRegion
class Command(BaseCommand):
args = 'input_file output_file'
    help = 'Maps the given country names to their codes and regions.'
def handle(self, *args, **options):
country_weightings = {}
with open(args[1], 'wb') as output:
with open(args[0]) as csvfile:
writer = csv.writer(output)
reader = csv.DictReader(csvfile)
writer.writerow(['Country', 'Region', 'Print', 'Radio', 'TV', 'Online'])
for row in reader:
if row['Country'] == "Ivory Coast":
row['Country'] = u"C\xf4te d'Ivoire"
                    code = countries.by_name(row['Country'])
                    if not code:
                        self.stdout.write('Country not found %s' % row['Country'])
                        break
                    region = CountryRegion.objects.get(country=code).region
writer.writerow([
code, region, row['Print'], row['Radio'], row['TV'], row['Online']
])
country_weightings[code] = {
'Region': region,
'Print': row['Print'],
'Radio': row['Radio'],
'Television': row['TV'],
'Internet': row['Online']
}
pprint(country_weightings)
|
Add management command to import weightings
|
Add management command to import weightings
|
Python
|
apache-2.0
|
Code4SA/gmmp,Code4SA/gmmp,Code4SA/gmmp
|
Add management command to import weightings
|
import csv
from pprint import pprint
from django.core.management.base import BaseCommand
from django_countries import countries
from forms.modelutils import CountryRegion
class Command(BaseCommand):
args = 'input_file output_file'
    help = 'Maps the given country names to their codes and regions.'
def handle(self, *args, **options):
country_weightings = {}
with open(args[1], 'wb') as output:
with open(args[0]) as csvfile:
writer = csv.writer(output)
reader = csv.DictReader(csvfile)
writer.writerow(['Country', 'Region', 'Print', 'Radio', 'TV', 'Online'])
for row in reader:
if row['Country'] == "Ivory Coast":
row['Country'] = u"C\xf4te d'Ivoire"
                    code = countries.by_name(row['Country'])
                    if not code:
                        self.stdout.write('Country not found %s' % row['Country'])
                        break
                    region = CountryRegion.objects.get(country=code).region
writer.writerow([
code, region, row['Print'], row['Radio'], row['TV'], row['Online']
])
country_weightings[code] = {
'Region': region,
'Print': row['Print'],
'Radio': row['Radio'],
'Television': row['TV'],
'Internet': row['Online']
}
pprint(country_weightings)
|
<commit_before><commit_msg>Add management command to import weightings<commit_after>
|
import csv
from pprint import pprint
from django.core.management.base import BaseCommand
from django_countries import countries
from forms.modelutils import CountryRegion
class Command(BaseCommand):
args = 'input_file output_file'
    help = 'Maps the given country names to their codes and regions.'
def handle(self, *args, **options):
country_weightings = {}
with open(args[1], 'wb') as output:
with open(args[0]) as csvfile:
writer = csv.writer(output)
reader = csv.DictReader(csvfile)
writer.writerow(['Country', 'Region', 'Print', 'Radio', 'TV', 'Online'])
for row in reader:
if row['Country'] == "Ivory Coast":
row['Country'] = u"C\xf4te d'Ivoire"
                    code = countries.by_name(row['Country'])
                    if not code:
                        self.stdout.write('Country not found %s' % row['Country'])
                        break
                    region = CountryRegion.objects.get(country=code).region
writer.writerow([
code, region, row['Print'], row['Radio'], row['TV'], row['Online']
])
country_weightings[code] = {
'Region': region,
'Print': row['Print'],
'Radio': row['Radio'],
'Television': row['TV'],
'Internet': row['Online']
}
pprint(country_weightings)
|
Add management command to import weightingsimport csv
from pprint import pprint
from django.core.management.base import BaseCommand
from django_countries import countries
from forms.modelutils import CountryRegion
class Command(BaseCommand):
args = 'input_file output_file'
    help = 'Maps the given country names to their codes and regions.'
def handle(self, *args, **options):
country_weightings = {}
with open(args[1], 'wb') as output:
with open(args[0]) as csvfile:
writer = csv.writer(output)
reader = csv.DictReader(csvfile)
writer.writerow(['Country', 'Region', 'Print', 'Radio', 'TV', 'Online'])
for row in reader:
if row['Country'] == "Ivory Coast":
row['Country'] = u"C\xf4te d'Ivoire"
                    code = countries.by_name(row['Country'])
                    if not code:
                        self.stdout.write('Country not found %s' % row['Country'])
                        break
                    region = CountryRegion.objects.get(country=code).region
writer.writerow([
code, region, row['Print'], row['Radio'], row['TV'], row['Online']
])
country_weightings[code] = {
'Region': region,
'Print': row['Print'],
'Radio': row['Radio'],
'Television': row['TV'],
'Internet': row['Online']
}
pprint(country_weightings)
|
<commit_before><commit_msg>Add management command to import weightings<commit_after>import csv
from pprint import pprint
from django.core.management.base import BaseCommand
from django_countries import countries
from forms.modelutils import CountryRegion
class Command(BaseCommand):
args = 'input_file output_file'
    help = 'Maps the given country names to their codes and regions.'
def handle(self, *args, **options):
country_weightings = {}
with open(args[1], 'wb') as output:
with open(args[0]) as csvfile:
writer = csv.writer(output)
reader = csv.DictReader(csvfile)
writer.writerow(['Country', 'Region', 'Print', 'Radio', 'TV', 'Online'])
for row in reader:
if row['Country'] == "Ivory Coast":
row['Country'] = u"C\xf4te d'Ivoire"
                    code = countries.by_name(row['Country'])
                    if not code:
                        self.stdout.write('Country not found %s' % row['Country'])
                        break
                    region = CountryRegion.objects.get(country=code).region
writer.writerow([
code, region, row['Print'], row['Radio'], row['TV'], row['Online']
])
country_weightings[code] = {
'Region': region,
'Print': row['Print'],
'Radio': row['Radio'],
'Television': row['TV'],
'Internet': row['Online']
}
pprint(country_weightings)
|
|
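Given args = 'input_file output_file', the command would be invoked roughly as python manage.py map_weights_to_codes weights.csv weights_with_codes.csv (file names made up). The name-to-code step leans on django_countries; a small standalone sketch of that lookup, assuming django_countries is installed:
# Sketch of the django_countries name-to-code lookup the command relies on.
from django_countries import countries
print(countries.by_name("Kenya"))  # 'KE'
print(countries.by_name("Nowhere") or "not found")  # unknown names yield a falsy result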
5043d74db37e3e3f955f51782164fae45a780d9e
|
test/test_message.py
|
test/test_message.py
|
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(r'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
|
Test quoted-printable is always applied to text
|
Test quoted-printable is always applied to text
|
Python
|
mit
|
jwodder/daemail
|
Test quoted-printable is always applied to text
|
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(r'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
|
<commit_before><commit_msg>Test quoted-printable is always applied to text<commit_after>
|
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(r'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
|
Test quoted-printable is always applied to textimport quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(r'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
|
<commit_before><commit_msg>Test quoted-printable is always applied to text<commit_after>import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(r'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
|
|
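The assertions rest on a property of quoted-printable encoding: non-ASCII bytes are rewritten as =XX escapes, so the raw UTF-8 bytes can never appear verbatim in the compiled message. A standalone illustration:
# Standalone illustration of the quoted-printable property asserted above.
import quopri
raw = 'àéîøü'.encode('utf-8')
encoded = quopri.encodestring(raw)
print(encoded)  # roughly b'=C3=A0=C3=A9=C3=AE=C3=B8=C3=BC'
assert raw not in encoded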
bf5ed92820a347fe8ecc34b8b2b204c44276a392
|
scooby_doo.py
|
scooby_doo.py
|
#!/usr/bin/env python3
for i in range(1, 101):
    if i % 3 == 0 and i % 7 == 0:
        print("Scooby Doo")
    elif i % 3 == 0:
        print("Scooby")
    elif i % 7 == 0:
        print("Doo")
    else:
        print(i)
|
Add solution to Scooby doo problem
|
Add solution to Scooby doo problem
|
Python
|
mit
|
khusi-anu/dgplug-python
|
Add solution to Scooby doo problem
|
#!/usr/bin/env python3
for i in range(1, 101):
    if i % 3 == 0 and i % 7 == 0:
        print("Scooby Doo")
    elif i % 3 == 0:
        print("Scooby")
    elif i % 7 == 0:
        print("Doo")
    else:
        print(i)
|
<commit_before><commit_msg>Add solution to Scooby doo problem<commit_after>
|
#!/usr/bin/env python3
for i in range(1, 101):
    if i % 3 == 0 and i % 7 == 0:
        print("Scooby Doo")
    elif i % 3 == 0:
        print("Scooby")
    elif i % 7 == 0:
        print("Doo")
    else:
        print(i)
|
Add solution to Scooby doo problem#!/usr/bin/env python3
for i in range(1, 101):
    if i % 3 == 0 and i % 7 == 0:
        print("Scooby Doo")
    elif i % 3 == 0:
        print("Scooby")
    elif i % 7 == 0:
        print("Doo")
    else:
        print(i)
|
<commit_before><commit_msg>Add solution to Scooby doo problem<commit_after>#!/usr/bin/env python3
for i in range(1, 101):
    if i % 3 == 0 and i % 7 == 0:
        print("Scooby Doo")
    elif i % 3 == 0:
        print("Scooby")
    elif i % 7 == 0:
        print("Doo")
    else:
        print(i)
|
|
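The same FizzBuzz-style branching can be folded into a string-building variant; an equivalent sketch:
# Equivalent string-building formulation of the Scooby/Doo logic.
for i in range(1, 101):
    out = ("Scooby" if i % 3 == 0 else "") + (" Doo" if i % 7 == 0 else "")
    print(out.strip() or i)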
d176a959473f279073fc1df8ed09d71f9a2ca1b4
|
custom/aaa/migrations/0008_auto_20190410_1952.py
|
custom/aaa/migrations/0008_auto_20190410_1952.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-10 19:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('aaa', '0007_auto_20190319_2225'),
]
operations = [
migrations.AlterField(
model_name='aggawc',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggawc',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggawc',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
]
|
Add migration after help_text changes
|
Add migration after help_text changes
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add migration after help_text changes
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-10 19:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('aaa', '0007_auto_20190319_2225'),
]
operations = [
migrations.AlterField(
model_name='aggawc',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggawc',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggawc',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
]
|
<commit_before><commit_msg>Add migration after help_text changes<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-10 19:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('aaa', '0007_auto_20190319_2225'),
]
operations = [
migrations.AlterField(
model_name='aggawc',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggawc',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggawc',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
]
|
Add migration after help_text changes# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-10 19:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('aaa', '0007_auto_20190319_2225'),
]
operations = [
migrations.AlterField(
model_name='aggawc',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggawc',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggawc',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
]
|
<commit_before><commit_msg>Add migration after help_text changes<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-10 19:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('aaa', '0007_auto_20190319_2225'),
]
operations = [
migrations.AlterField(
model_name='aggawc',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggawc',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggawc',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='high_risk_pregnancies',
field=models.PositiveIntegerField(help_text='hrp=yes when the ccs record was open and pregnant during the month', null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='institutional_deliveries',
field=models.PositiveIntegerField(help_text="add in this month and child_birth_location = 'hospital' regardless of open status", null=True),
),
migrations.AlterField(
model_name='aggvillage',
name='total_deliveries',
field=models.PositiveIntegerField(help_text='add in this month regardless of open status', null=True),
),
]
|
|
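Since the migration only alters help_text, which is display metadata, applying it updates Django's migration state without issuing schema-changing SQL. One of the fields it describes would look roughly like this on the model (the surrounding class is assumed; the values come from the migration):
# Rough shape of one field the migration updates; the class context is assumed.
from django.db import models
class AggAwc(models.Model):
    total_deliveries = models.PositiveIntegerField(
        help_text='add in this month regardless of open status',
        null=True,
    )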
84ae17aaf19330a57f96171bd6bcda94e0dbb7d0
|
tests/test_exception.py
|
tests/test_exception.py
|
# -*- coding: utf-8 -*-
import pytest
from wait_for import wait_for, TimedOutError
class MyError(Exception):
"""A sample exception for use by the tests in this module."""
def raise_my_error():
"""Raise ``MyError``."""
raise MyError()
def test_handle_exception_v1():
"""Don't set ``handle_exception``.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error)
def test_handle_exception_v2():
"""Set ``handle_exception`` to false.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error, handle_exception=False)
def test_handle_exception_v3():
"""Set ``handle_exception`` to true.
An exception raised by the waited-upon function should not bubble up, and a
``TimedOutError`` should be raised instead.
"""
with pytest.raises(TimedOutError):
wait_for(raise_my_error, handle_exception=True, num_sec=0.1)
def test_handle_exception_silent_failure_v1():
"""Set both ``handle_exception`` and ``silent_failure`` to true.
The time spent calling the waited-upon function should be returned.
"""
_, num_sec = _call_handle_exception_silent_failure()
assert isinstance(num_sec, float)
def _call_handle_exception_silent_failure():
return wait_for(
raise_my_error,
handle_exception=True,
num_sec=0.1,
silent_failure=True,
)
|
Add some unit tests for exception handling
|
Add some unit tests for exception handling
|
Python
|
apache-2.0
|
RedHatQE/wait_for
|
Add some unit tests for exception handling
|
# -*- coding: utf-8 -*-
import pytest
from wait_for import wait_for, TimedOutError
class MyError(Exception):
"""A sample exception for use by the tests in this module."""
def raise_my_error():
"""Raise ``MyError``."""
raise MyError()
def test_handle_exception_v1():
"""Don't set ``handle_exception``.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error)
def test_handle_exception_v2():
"""Set ``handle_exception`` to false.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error, handle_exception=False)
def test_handle_exception_v3():
"""Set ``handle_exception`` to true.
An exception raised by the waited-upon function should not bubble up, and a
``TimedOutError`` should be raised instead.
"""
with pytest.raises(TimedOutError):
wait_for(raise_my_error, handle_exception=True, num_sec=0.1)
def test_handle_exception_silent_failure_v1():
"""Set both ``handle_exception`` and ``silent_failure`` to true.
The time spent calling the waited-upon function should be returned.
"""
_, num_sec = _call_handle_exception_silent_failure()
assert isinstance(num_sec, float)
def _call_handle_exception_silent_failure():
return wait_for(
raise_my_error,
handle_exception=True,
num_sec=0.1,
silent_failure=True,
)
|
<commit_before><commit_msg>Add some unit tests for exception handling<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
from wait_for import wait_for, TimedOutError
class MyError(Exception):
"""A sample exception for use by the tests in this module."""
def raise_my_error():
"""Raise ``MyError``."""
raise MyError()
def test_handle_exception_v1():
"""Don't set ``handle_exception``.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error)
def test_handle_exception_v2():
"""Set ``handle_exception`` to false.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error, handle_exception=False)
def test_handle_exception_v3():
"""Set ``handle_exception`` to true.
An exception raised by the waited-upon function should not bubble up, and a
``TimedOutError`` should be raised instead.
"""
with pytest.raises(TimedOutError):
wait_for(raise_my_error, handle_exception=True, num_sec=0.1)
def test_handle_exception_silent_failure_v1():
"""Set both ``handle_exception`` and ``silent_failure`` to true.
The time spent calling the waited-upon function should be returned.
"""
_, num_sec = _call_handle_exception_silent_failure()
assert isinstance(num_sec, float)
def _call_handle_exception_silent_failure():
return wait_for(
raise_my_error,
handle_exception=True,
num_sec=0.1,
silent_failure=True,
)
|
Add some unit tests for exception handling# -*- coding: utf-8 -*-
import pytest
from wait_for import wait_for, TimedOutError
class MyError(Exception):
"""A sample exception for use by the tests in this module."""
def raise_my_error():
"""Raise ``MyError``."""
raise MyError()
def test_handle_exception_v1():
"""Don't set ``handle_exception``.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error)
def test_handle_exception_v2():
"""Set ``handle_exception`` to false.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error, handle_exception=False)
def test_handle_exception_v3():
"""Set ``handle_exception`` to true.
An exception raised by the waited-upon function should not bubble up, and a
``TimedOutError`` should be raised instead.
"""
with pytest.raises(TimedOutError):
wait_for(raise_my_error, handle_exception=True, num_sec=0.1)
def test_handle_exception_silent_failure_v1():
"""Set both ``handle_exception`` and ``silent_failure`` to true.
The time spent calling the waited-upon function should be returned.
"""
_, num_sec = _call_handle_exception_silent_failure()
assert isinstance(num_sec, float)
def _call_handle_exception_silent_failure():
return wait_for(
raise_my_error,
handle_exception=True,
num_sec=0.1,
silent_failure=True,
)
|
<commit_before><commit_msg>Add some unit tests for exception handling<commit_after># -*- coding: utf-8 -*-
import pytest
from wait_for import wait_for, TimedOutError
class MyError(Exception):
"""A sample exception for use by the tests in this module."""
def raise_my_error():
"""Raise ``MyError``."""
raise MyError()
def test_handle_exception_v1():
"""Don't set ``handle_exception``.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error)
def test_handle_exception_v2():
"""Set ``handle_exception`` to false.
An exception raised by the waited-upon function should bubble up.
"""
with pytest.raises(MyError):
wait_for(raise_my_error, handle_exception=False)
def test_handle_exception_v3():
"""Set ``handle_exception`` to true.
An exception raised by the waited-upon function should not bubble up, and a
``TimedOutError`` should be raised instead.
"""
with pytest.raises(TimedOutError):
wait_for(raise_my_error, handle_exception=True, num_sec=0.1)
def test_handle_exception_silent_failure_v1():
"""Set both ``handle_exception`` and ``silent_failure`` to true.
The time spent calling the waited-upon function should be returned.
"""
_, num_sec = _call_handle_exception_silent_failure()
assert isinstance(num_sec, float)
def _call_handle_exception_silent_failure():
return wait_for(
raise_my_error,
handle_exception=True,
num_sec=0.1,
silent_failure=True,
)
|
|
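For contrast with the failing cases above, a sketch of the happy path: with handle_exception=True, wait_for retries through early exceptions until the function succeeds, then returns the result together with the elapsed time. The flaky counter is a made-up example:
# Made-up happy-path example: early exceptions are retried, then success.
from wait_for import wait_for
attempts = {"n": 0}
def flaky():
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise RuntimeError("not ready yet")
    return "ready"
out, duration = wait_for(flaky, handle_exception=True, num_sec=10, delay=0.1)
print(out, duration)  # 'ready' after roughly two retries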
7664670f43ec9ea2a28df436d2e87d3259d6e519
|
document/management/commands/clean_submitters.py
|
document/management/commands/clean_submitters.py
|
import logging
from django.core.management.base import BaseCommand
from django.db import transaction
from document.models import Document
from document.models import Submitter
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
self.do()
@transaction.atomic
def do(self):
submitters = Submitter.objects.all()
documents = Document.objects.all()
counter = 1
n_docs = documents.count()
print(n_docs)
for document in documents:
print('document ' + str(counter) + '/' + str(n_docs))
doc_submitters = submitters.filter(document=document)
person_ids = []
for doc_sub in doc_submitters:
if doc_sub.person.id in person_ids:
doc_sub.delete()
else:
person_ids.append(doc_sub.person.id)
counter += 1
|
Create command to remove duplicate submitters
|
Create command to remove duplicate submitters
|
Python
|
mit
|
openkamer/openkamer,openkamer/openkamer,openkamer/openkamer,openkamer/openkamer
|
Create command to remove duplicate submitters
|
import logging
from django.core.management.base import BaseCommand
from django.db import transaction
from document.models import Document
from document.models import Submitter
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
self.do()
@transaction.atomic
def do(self):
submitters = Submitter.objects.all()
documents = Document.objects.all()
counter = 1
n_docs = documents.count()
print(n_docs)
for document in documents:
print('document ' + str(counter) + '/' + str(n_docs))
doc_submitters = submitters.filter(document=document)
person_ids = []
for doc_sub in doc_submitters:
if doc_sub.person.id in person_ids:
doc_sub.delete()
else:
person_ids.append(doc_sub.person.id)
counter += 1
|
<commit_before><commit_msg>Create command to remove duplicate submitters<commit_after>
|
import logging
from django.core.management.base import BaseCommand
from django.db import transaction
from document.models import Document
from document.models import Submitter
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
self.do()
@transaction.atomic
def do(self):
submitters = Submitter.objects.all()
documents = Document.objects.all()
counter = 1
n_docs = documents.count()
print(n_docs)
for document in documents:
print('document ' + str(counter) + '/' + str(n_docs))
doc_submitters = submitters.filter(document=document)
person_ids = []
for doc_sub in doc_submitters:
if doc_sub.person.id in person_ids:
doc_sub.delete()
else:
person_ids.append(doc_sub.person.id)
counter += 1
|
Create command to remove duplicate submittersimport logging
from django.core.management.base import BaseCommand
from django.db import transaction
from document.models import Document
from document.models import Submitter
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
self.do()
@transaction.atomic
def do(self):
submitters = Submitter.objects.all()
documents = Document.objects.all()
counter = 1
n_docs = documents.count()
print(n_docs)
for document in documents:
print('document ' + str(counter) + '/' + str(n_docs))
doc_submitters = submitters.filter(document=document)
person_ids = []
for doc_sub in doc_submitters:
if doc_sub.person.id in person_ids:
doc_sub.delete()
else:
person_ids.append(doc_sub.person.id)
counter += 1
|
<commit_before><commit_msg>Create command to remove duplicate submitters<commit_after>import logging
from django.core.management.base import BaseCommand
from django.db import transaction
from document.models import Document
from document.models import Submitter
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, *args, **options):
self.do()
@transaction.atomic
def do(self):
submitters = Submitter.objects.all()
documents = Document.objects.all()
counter = 1
n_docs = documents.count()
print(n_docs)
for document in documents:
print('document ' + str(counter) + '/' + str(n_docs))
doc_submitters = submitters.filter(document=document)
person_ids = []
for doc_sub in doc_submitters:
if doc_sub.person.id in person_ids:
doc_sub.delete()
else:
person_ids.append(doc_sub.person.id)
counter += 1
|
|
f2dab6c81e8d0308dede549549a1b6f3b1ab78cf
|
analysis/check_files.py
|
analysis/check_files.py
|
#!/usr/bin/env python
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
import argparse
import os
import logging
import sys
import yaml
try:
# Try to use libyaml which is faster
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
# fall back on python implementation
from yaml import Loader, Dumper
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error'])
parser.add_argument('first_yml', type=argparse.FileType('r'))
parser.add_argument('second_yml', type=argparse.FileType('r'))
pargs = parser.parse_args(args)
logLevel = getattr(logging, pargs.log_level.upper(),None)
logging.basicConfig(level=logLevel)
firstResults = yaml.load(pargs.first_yml, Loader=Loader)
secondResults = yaml.load(pargs.second_yml, Loader=Loader)
assert isinstance(firstResults, list)
assert isinstance(secondResults, list)
if len(firstResults) == 0:
logging.error('First Result list is empty')
return 1
if len(secondResults) == 0:
logging.error('Second Result list is empty')
return 1
# Create set of all used files
programsInFirst = set()
programsInSecond = set()
for r in firstResults:
programsInFirst.add(r['program'])
for r in secondResults:
programsInSecond.add(r['program'])
resultMissingFromSecond= [ ]
resultMissingFromFirst=[ ]
# Check for files missing in second
for r in firstResults:
if not (r['program'] in programsInSecond):
resultMissingFromSecond.append(r)
logging.warning('Program {} is missing from second but present in first'.format(r['program']))
# Check for files missing in first
for r in secondResults:
if not (r['program'] in programsInFirst):
resultMissingFromFirst.append(r)
logging.warning('Program {} is missing from first but present in second'.format(r['program']))
print("# of programs missing from second but present in first: {}".format(len(resultMissingFromSecond)))
print("# of programs missing from first but present in second: {}".format(len(resultMissingFromFirst)))
print("")
print("# Missing from second")
for r in resultMissingFromSecond:
print(r)
print("# Missing from first")
for r in resultMissingFromFirst:
print(r)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Add script to detect missing results between yaml files that we intend to compare.
|
Add script to detect missing results between yaml files that we
intend to compare.
|
Python
|
bsd-3-clause
|
symbooglix/boogie-runner,symbooglix/boogie-runner
|
Add script to detect missing results between yaml files that we
intend to compare.
|
#!/usr/bin/env python
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
import argparse
import os
import logging
import sys
import yaml
try:
# Try to use libyaml which is faster
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
# fall back on python implementation
from yaml import Loader, Dumper
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error'])
parser.add_argument('first_yml', type=argparse.FileType('r'))
parser.add_argument('second_yml', type=argparse.FileType('r'))
pargs = parser.parse_args(args)
logLevel = getattr(logging, pargs.log_level.upper(),None)
logging.basicConfig(level=logLevel)
firstResults = yaml.load(pargs.first_yml, Loader=Loader)
secondResults = yaml.load(pargs.second_yml, Loader=Loader)
assert isinstance(firstResults, list)
assert isinstance(secondResults, list)
if len(firstResults) == 0:
logging.error('First Result list is empty')
return 1
if len(secondResults) == 0:
logging.error('Second Result list is empty')
return 1
# Create set of all used files
programsInFirst = set()
programsInSecond = set()
for r in firstResults:
programsInFirst.add(r['program'])
for r in secondResults:
programsInSecond.add(r['program'])
resultMissingFromSecond= [ ]
resultMissingFromFirst=[ ]
# Check for files missing in second
for r in firstResults:
if not (r['program'] in programsInSecond):
resultMissingFromSecond.append(r)
logging.warning('Program {} is missing from second but present in first'.format(r['program']))
# Check for files missing in first
for r in secondResults:
if not (r['program'] in programsInFirst):
resultMissingFromFirst.append(r)
logging.warning('Program {} is missing from first but present in second'.format(r['program']))
print("# of programs missing from second but present in first: {}".format(len(resultMissingFromSecond)))
print("# of programs missing from first but present in second: {}".format(len(resultMissingFromFirst)))
print("")
print("# Missing from second")
for r in resultMissingFromSecond:
print(r)
print("# Missing from first")
for r in resultMissingFromFirst:
print(r)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>Add script to detect missing results between yaml files that we
intend to compare.<commit_after>
|
#!/usr/bin/env python
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
import argparse
import os
import logging
import sys
import yaml
try:
# Try to use libyaml which is faster
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
# fall back on python implementation
from yaml import Loader, Dumper
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error'])
parser.add_argument('first_yml', type=argparse.FileType('r'))
parser.add_argument('second_yml', type=argparse.FileType('r'))
pargs = parser.parse_args(args)
logLevel = getattr(logging, pargs.log_level.upper(),None)
logging.basicConfig(level=logLevel)
firstResults = yaml.load(pargs.first_yml, Loader=Loader)
secondResults = yaml.load(pargs.second_yml, Loader=Loader)
assert isinstance(firstResults, list)
assert isinstance(secondResults, list)
if len(firstResults) == 0:
logging.error('First Result list is empty')
return 1
if len(secondResults) == 0:
logging.error('Second Result list is empty')
return 1
# Create set of all used files
programsInFirst = set()
programsInSecond = set()
for r in firstResults:
programsInFirst.add(r['program'])
for r in secondResults:
programsInSecond.add(r['program'])
resultMissingFromSecond= [ ]
resultMissingFromFirst=[ ]
# Check for files missing in second
for r in firstResults:
if not (r['program'] in programsInSecond):
resultMissingFromSecond.append(r)
logging.warning('Program {} is missing from second but present in first'.format(r['program']))
# Check for files missing in first
for r in secondResults:
if not (r['program'] in programsInFirst):
resultMissingFromFirst.append(r)
logging.warning('Program {} is missing from first but present in second'.format(r['program']))
print("# of programs missing from second but present in first: {}".format(len(resultMissingFromSecond)))
print("# of programs missing from first but present in second: {}".format(len(resultMissingFromFirst)))
print("")
print("# Missing from second")
for r in resultMissingFromSecond:
print(r)
print("# Missing from first")
for r in resultMissingFromFirst:
print(r)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Add script to detect missing results between yaml files that we
intend to compare.#!/usr/bin/env python
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
import argparse
import os
import logging
import sys
import yaml
try:
# Try to use libyaml which is faster
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
# fall back on python implementation
from yaml import Loader, Dumper
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error'])
parser.add_argument('first_yml', type=argparse.FileType('r'))
parser.add_argument('second_yml', type=argparse.FileType('r'))
pargs = parser.parse_args(args)
logLevel = getattr(logging, pargs.log_level.upper(),None)
logging.basicConfig(level=logLevel)
firstResults = yaml.load(pargs.first_yml, Loader=Loader)
secondResults = yaml.load(pargs.second_yml, Loader=Loader)
assert isinstance(firstResults, list)
assert isinstance(secondResults, list)
if len(firstResults) == 0:
logging.error('First Result list is empty')
return 1
if len(secondResults) == 0:
logging.error('Second Result list is empty')
return 1
# Create set of all used files
programsInFirst = set()
programsInSecond = set()
for r in firstResults:
programsInFirst.add(r['program'])
for r in secondResults:
programsInSecond.add(r['program'])
resultMissingFromSecond= [ ]
resultMissingFromFirst=[ ]
# Check for files missing in second
for r in firstResults:
if not (r['program'] in programsInSecond):
resultMissingFromSecond.append(r)
logging.warning('Program {} is missing from second but present in first'.format(r['program']))
# Check for files missing in first
for r in secondResults:
if not (r['program'] in programsInFirst):
resultMissingFromFirst.append(r)
logging.warning('Program {} is missing from first but present in second'.format(r['program']))
print("# of programs missing from second but present in first: {}".format(len(resultMissingFromSecond)))
print("# of programs missing from first but present in second: {}".format(len(resultMissingFromFirst)))
print("")
print("# Missing from second")
for r in resultMissingFromSecond:
print(r)
print("# Missing from first")
for r in resultMissingFromFirst:
print(r)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>Add script to detect missing results between yaml files that we
intend to compare.<commit_after>#!/usr/bin/env python
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
import argparse
import os
import logging
import sys
import yaml
try:
# Try to use libyaml which is faster
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
# fall back on python implementation
from yaml import Loader, Dumper
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error'])
parser.add_argument('first_yml', type=argparse.FileType('r'))
parser.add_argument('second_yml', type=argparse.FileType('r'))
pargs = parser.parse_args(args)
logLevel = getattr(logging, pargs.log_level.upper(),None)
logging.basicConfig(level=logLevel)
firstResults = yaml.load(pargs.first_yml, Loader=Loader)
secondResults = yaml.load(pargs.second_yml, Loader=Loader)
assert isinstance(firstResults, list)
assert isinstance(secondResults, list)
if len(firstResults) == 0:
logging.error('First Result list is empty')
return 1
if len(secondResults) == 0:
logging.error('Second Result list is empty')
return 1
# Create set of all used files
programsInFirst = set()
programsInSecond = set()
for r in firstResults:
programsInFirst.add(r['program'])
for r in secondResults:
programsInSecond.add(r['program'])
resultMissingFromSecond= [ ]
resultMissingFromFirst=[ ]
# Check for files missing in second
for r in firstResults:
if not (r['program'] in programsInSecond):
resultMissingFromSecond.append(r)
logging.warning('Program {} is missing from second but present in first'.format(r['program']))
# Check for files missing in first
for r in secondResults:
if not (r['program'] in programsInFirst):
resultMissingFromFirst.append(r)
logging.warning('Program {} is missing from first but present in second'.format(r['program']))
print("# of programs missing from second but present in first: {}".format(len(resultMissingFromSecond)))
print("# of programs missing from first but present in second: {}".format(len(resultMissingFromFirst)))
print("")
print("# Missing from second")
for r in resultMissingFromSecond:
print(r)
print("# Missing from first")
for r in resultMissingFromFirst:
print(r)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
|
aee5bdbb09cd12478e4cdccb3db33dc3f5ffc498
|
app/tests/test_utils.py
|
app/tests/test_utils.py
|
from redidropper.utils import clean_int, allowed_file
#import unittest
#class TestUtils(unittest.TestCase):
def test_clean_int():
"""
Verify common cases
"""
cases = [
{"x": None, "exp": None},
{"x": "", "exp": None},
{"x": " ", "exp": None},
{"x": " 0x0", "exp": None},
{"x": "-1", "exp": None},
{"x": "-0.3", "exp": None},
{"x": "0.0", "exp": None},
{"x": "0", "exp": 0},
{"x": "0.3", "exp": None},
{"x": "01", "exp": 1},
{"x": "2", "exp": 2},
{"x": 3, "exp": 3},
{"x": 1.2, "exp": None},
{"x": 123, "exp": 123},
]
for case in cases:
actual = clean_int(case['x'])
expected = case['exp']
assert actual == expected
def test_allowed_file():
cases = [
{"x": None, "exp": False},
{"x": "", "exp": False},
{"x": " ", "exp": False},
{"x": " 0x0", "exp": False},
{"x": "x.rar", "exp": False},
{"x": "tgz", "exp": False},
{"x": "a .txt", "exp": True},
{"x": "b.pdf", "exp": True},
{"x": "c.png", "exp": True},
{"x": "d.jpg", "exp": True},
{"x": "e.jpeg", "exp": True},
{"x": "f.gif", "exp": True},
{"x": "g.tiff", "exp": True},
{"x": "h.zip", "exp": True},
{"x": "i.tar", "exp": True},
{"x": "j.tgz", "exp": True},
{"x": "k.bz2", "exp": True},
]
for case in cases:
actual = allowed_file(case['x'])
expected = case['exp']
assert actual == expected
|
Add two tests for utils.py
|
Add two tests for utils.py
|
Python
|
bsd-3-clause
|
indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client
|
Add two tests for utils.py
|
from redidropper.utils import clean_int, allowed_file
#import unittest
#class TestUtils(unittest.TestCase):
def test_clean_int():
"""
Verify common cases
"""
cases = [
{"x": None, "exp": None},
{"x": "", "exp": None},
{"x": " ", "exp": None},
{"x": " 0x0", "exp": None},
{"x": "-1", "exp": None},
{"x": "-0.3", "exp": None},
{"x": "0.0", "exp": None},
{"x": "0", "exp": 0},
{"x": "0.3", "exp": None},
{"x": "01", "exp": 1},
{"x": "2", "exp": 2},
{"x": 3, "exp": 3},
{"x": 1.2, "exp": None},
{"x": 123, "exp": 123},
]
for case in cases:
actual = clean_int(case['x'])
expected = case['exp']
assert actual == expected
def test_allowed_file():
cases = [
{"x": None, "exp": False},
{"x": "", "exp": False},
{"x": " ", "exp": False},
{"x": " 0x0", "exp": False},
{"x": "x.rar", "exp": False},
{"x": "tgz", "exp": False},
{"x": "a .txt", "exp": True},
{"x": "b.pdf", "exp": True},
{"x": "c.png", "exp": True},
{"x": "d.jpg", "exp": True},
{"x": "e.jpeg", "exp": True},
{"x": "f.gif", "exp": True},
{"x": "g.tiff", "exp": True},
{"x": "h.zip", "exp": True},
{"x": "i.tar", "exp": True},
{"x": "j.tgz", "exp": True},
{"x": "k.bz2", "exp": True},
]
for case in cases:
actual = allowed_file(case['x'])
expected = case['exp']
assert actual == expected
|
<commit_before><commit_msg>Add two tests for utils.py<commit_after>
|
from redidropper.utils import clean_int, allowed_file
#import unittest
#class TestUtils(unittest.TestCase):
def test_clean_int():
"""
Verify common cases
"""
cases = [
{"x": None, "exp": None},
{"x": "", "exp": None},
{"x": " ", "exp": None},
{"x": " 0x0", "exp": None},
{"x": "-1", "exp": None},
{"x": "-0.3", "exp": None},
{"x": "0.0", "exp": None},
{"x": "0", "exp": 0},
{"x": "0.3", "exp": None},
{"x": "01", "exp": 1},
{"x": "2", "exp": 2},
{"x": 3, "exp": 3},
{"x": 1.2, "exp": None},
{"x": 123, "exp": 123},
]
for case in cases:
actual = clean_int(case['x'])
expected = case['exp']
assert actual == expected
def test_allowed_file():
cases = [
{"x": None, "exp": False},
{"x": "", "exp": False},
{"x": " ", "exp": False},
{"x": " 0x0", "exp": False},
{"x": "x.rar", "exp": False},
{"x": "tgz", "exp": False},
{"x": "a .txt", "exp": True},
{"x": "b.pdf", "exp": True},
{"x": "c.png", "exp": True},
{"x": "d.jpg", "exp": True},
{"x": "e.jpeg", "exp": True},
{"x": "f.gif", "exp": True},
{"x": "g.tiff", "exp": True},
{"x": "h.zip", "exp": True},
{"x": "i.tar", "exp": True},
{"x": "j.tgz", "exp": True},
{"x": "k.bz2", "exp": True},
]
for case in cases:
actual = allowed_file(case['x'])
expected = case['exp']
assert actual == expected
|
Add two tests for utils.pyfrom redidropper.utils import clean_int, allowed_file
#import unittest
#class TestUtils(unittest.TestCase):
def test_clean_int():
"""
Verify common cases
"""
cases = [
{"x": None, "exp": None},
{"x": "", "exp": None},
{"x": " ", "exp": None},
{"x": " 0x0", "exp": None},
{"x": "-1", "exp": None},
{"x": "-0.3", "exp": None},
{"x": "0.0", "exp": None},
{"x": "0", "exp": 0},
{"x": "0.3", "exp": None},
{"x": "01", "exp": 1},
{"x": "2", "exp": 2},
{"x": 3, "exp": 3},
{"x": 1.2, "exp": None},
{"x": 123, "exp": 123},
]
for case in cases:
actual = clean_int(case['x'])
expected = case['exp']
assert actual == expected
def test_allowed_file():
cases = [
{"x": None, "exp": False},
{"x": "", "exp": False},
{"x": " ", "exp": False},
{"x": " 0x0", "exp": False},
{"x": "x.rar", "exp": False},
{"x": "tgz", "exp": False},
{"x": "a .txt", "exp": True},
{"x": "b.pdf", "exp": True},
{"x": "c.png", "exp": True},
{"x": "d.jpg", "exp": True},
{"x": "e.jpeg", "exp": True},
{"x": "f.gif", "exp": True},
{"x": "g.tiff", "exp": True},
{"x": "h.zip", "exp": True},
{"x": "i.tar", "exp": True},
{"x": "j.tgz", "exp": True},
{"x": "k.bz2", "exp": True},
]
for case in cases:
actual = allowed_file(case['x'])
expected = case['exp']
assert actual == expected
|
<commit_before><commit_msg>Add two tests for utils.py<commit_after>from redidropper.utils import clean_int, allowed_file
#import unittest
#class TestUtils(unittest.TestCase):
def test_clean_int():
"""
Verify common cases
"""
cases = [
{"x": None, "exp": None},
{"x": "", "exp": None},
{"x": " ", "exp": None},
{"x": " 0x0", "exp": None},
{"x": "-1", "exp": None},
{"x": "-0.3", "exp": None},
{"x": "0.0", "exp": None},
{"x": "0", "exp": 0},
{"x": "0.3", "exp": None},
{"x": "01", "exp": 1},
{"x": "2", "exp": 2},
{"x": 3, "exp": 3},
{"x": 1.2, "exp": None},
{"x": 123, "exp": 123},
]
for case in cases:
actual = clean_int(case['x'])
expected = case['exp']
assert actual == expected
def test_allowed_file():
cases = [
{"x": None, "exp": False},
{"x": "", "exp": False},
{"x": " ", "exp": False},
{"x": " 0x0", "exp": False},
{"x": "x.rar", "exp": False},
{"x": "tgz", "exp": False},
{"x": "a .txt", "exp": True},
{"x": "b.pdf", "exp": True},
{"x": "c.png", "exp": True},
{"x": "d.jpg", "exp": True},
{"x": "e.jpeg", "exp": True},
{"x": "f.gif", "exp": True},
{"x": "g.tiff", "exp": True},
{"x": "h.zip", "exp": True},
{"x": "i.tar", "exp": True},
{"x": "j.tgz", "exp": True},
{"x": "k.bz2", "exp": True},
]
for case in cases:
actual = allowed_file(case['x'])
expected = case['exp']
assert actual == expected
|
|
9a3128c4994eb241d6a69d33606e3aeacfa6475b
|
learntris.py
|
learntris.py
|
#!/usr/bin/python2.7
import sys
WIDTH = 10
LENGTH = 22
def main():
command_line_arguments = sys.argv
print command_line_arguments[0]
if len(command_line_arguments) >= 2:
arguments(command_line_arguments)
else:
print "no arguments passed"
def arguments(command_line_arguments):
print_code = 'p'
quit = 'q'
if print_code in command_line_arguments:
draw(WIDTH, LENGTH)
elif quit in command_line_arguments:
pass
else:
print "no valid arguments passed, pass an argument to do something"
return
def draw(width, length):
matrix = ""
for y in range(length):
for x in range(width):
matrix += "." + " "
matrix.rstrip()
matrix += "\n"
print matrix
return
if __name__ == '__main__':
main()
|
Use command line arguments to tell it what to do
|
Use command line arguments to tell it what to do
|
Python
|
mit
|
SultansDream/testris
|
Use command line arguments to tell it what to do
|
#!/usr/bin/python2.7
import sys
WIDTH = 10
LENGTH = 22
def main():
command_line_arguments = sys.argv
print command_line_arguments[0]
if len(command_line_arguments) >= 2:
arguments(command_line_arguments)
else:
print "no arguments passed"
def arguments(command_line_arguments):
print_code = 'p'
quit = 'q'
if print_code in command_line_arguments:
draw(WIDTH, LENGTH)
elif quit in command_line_arguments:
pass
else:
print "no valid arguments passed, pass an argument to do something"
return
def draw(width, length):
matrix = ""
for y in range(length):
for x in range(width):
matrix += "." + " "
matrix.rstrip()
matrix += "\n"
print matrix
return
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Use command line arguments to tell it what to do<commit_after>
|
#!/usr/bin/python2.7
import sys
WIDTH = 10
LENGTH = 22
def main():
command_line_arguments = sys.argv
print command_line_arguments[0]
if len(command_line_arguments) >= 2:
arguments(command_line_arguments)
else:
print "no arguments passed"
def arguments(command_line_arguments):
print_code = 'p'
quit = 'q'
if print_code in command_line_arguments:
draw(WIDTH, LENGTH)
elif quit in command_line_arguments:
pass
else:
print "no valid arguments passed, pass an argument to do something"
return
def draw(width, length):
matrix = ""
for y in range(length):
for x in range(width):
matrix += "." + " "
matrix.rstrip()
matrix += "\n"
print matrix
return
if __name__ == '__main__':
main()
|
Use command line arguments to tell it what to do#!/usr/bin/python2.7
import sys
WIDTH = 10
LENGTH = 22
def main():
command_line_arguments = sys.argv
print command_line_arguments[0]
if len(command_line_arguments) >= 2:
arguments(command_line_arguments)
else:
print "no arguments passed"
def arguments(command_line_arguments):
print_code = 'p'
quit = 'q'
if print_code in command_line_arguments:
draw(WIDTH, LENGTH)
elif quit in command_line_arguments:
pass
else:
print "no valid arguments passed, pass an argument to do something"
return
def draw(width, length):
matrix = ""
for y in range(length):
for x in range(width):
matrix += "." + " "
matrix.rstrip()
matrix += "\n"
print matrix
return
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Use command line arguments to tell it what to do<commit_after>#!/usr/bin/python2.7
import sys
WIDTH = 10
LENGTH = 22
def main():
command_line_arguments = sys.argv
print command_line_arguments[0]
if len(command_line_arguments) >= 2:
arguments(command_line_arguments)
else:
print "no arguments passed"
def arguments(command_line_arguments):
print_code = 'p'
quit = 'q'
if print_code in command_line_arguments:
draw(WIDTH, LENGTH)
elif quit in command_line_arguments:
pass
else:
print "no valid arguments passed, pass an argument to do something"
return
def draw(width, length):
matrix = ""
for y in range(length):
for x in range(width):
matrix += "." + " "
matrix.rstrip()
matrix += "\n"
print matrix
return
if __name__ == '__main__':
main()
|
|
ac7cff3a1a74934cc728297bd274bf3ac3dd9a97
|
logfilter.py
|
logfilter.py
|
#!/usr/bin/python
from paste.translogger import TransLogger
def factory(config, **settings):
def filter(app):
config.update(settings);
return TransLogger(app, setup_console_handler = True)
return filter
|
Add this class useful for debugging, which logs requests passing through it.
|
Add this class useful for debugging, which logs requests passing through it.
|
Python
|
apache-2.0
|
NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api
|
Add this class useful for debugging, which logs requests passing through it.
|
#!/usr/bin/python
from paste.translogger import TransLogger
def factory(config, **settings):
def filter(app):
config.update(settings);
return TransLogger(app, setup_console_handler = True)
return filter
|
<commit_before><commit_msg>Add this class useful for debugging, which logs requests passing through it.<commit_after>
|
#!/usr/bin/python
from paste.translogger import TransLogger
def factory(config, **settings):
def filter(app):
config.update(settings);
return TransLogger(app, setup_console_handler = True)
return filter
|
Add this class useful for debugging, which logs requests passing through it.#!/usr/bin/python
from paste.translogger import TransLogger
def factory(config, **settings):
def filter(app):
config.update(settings);
return TransLogger(app, setup_console_handler = True)
return filter
|
<commit_before><commit_msg>Add this class useful for debugging, which logs requests passing through it.<commit_after>#!/usr/bin/python
from paste.translogger import TransLogger
def factory(config, **settings):
def filter(app):
config.update(settings);
return TransLogger(app, setup_console_handler = True)
return filter
|
|
9db5e0d8a17f250e0d1a9acbcd070876a418c228
|
aleph/migrate/versions/af9b37868cf3_remove_doc_tables.py
|
aleph/migrate/versions/af9b37868cf3_remove_doc_tables.py
|
"""Remove document-related tables
Revision ID: af9b37868cf3
Revises: 284a9ec16306
Create Date: 2019-06-13 17:45:43.310462
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'af9b37868cf3'
down_revision = '284a9ec16306'
def upgrade():
op.drop_index('ix_document_tag_document_id', table_name='document_tag')
op.drop_index('ix_document_tag_origin', table_name='document_tag')
op.drop_table('document_tag')
op.drop_table('balkhash_df643aa36820436e85679170e1f6832f')
op.drop_index('ix_subscription_channel', table_name='subscription')
op.drop_index('ix_subscription_role_id', table_name='subscription')
op.drop_table('subscription')
op.drop_table('audit')
op.drop_index('ix_document_record_document_id',
table_name='document_record')
op.drop_index('ix_document_record_index',
table_name='document_record')
op.drop_table('document_record')
op.drop_column('document', 'status')
op.drop_column('document', 'error_message')
op.drop_column('document', 'body_text')
op.drop_column('document', 'body_raw')
def downgrade():
pass
|
Remove document tables and audit table
|
Remove document tables and audit table
|
Python
|
mit
|
alephdata/aleph,pudo/aleph,alephdata/aleph,pudo/aleph,pudo/aleph,alephdata/aleph,alephdata/aleph,alephdata/aleph
|
Remove document tables and audit table
|
"""Remove document-related tables
Revision ID: af9b37868cf3
Revises: 284a9ec16306
Create Date: 2019-06-13 17:45:43.310462
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'af9b37868cf3'
down_revision = '284a9ec16306'
def upgrade():
op.drop_index('ix_document_tag_document_id', table_name='document_tag')
op.drop_index('ix_document_tag_origin', table_name='document_tag')
op.drop_table('document_tag')
op.drop_table('balkhash_df643aa36820436e85679170e1f6832f')
op.drop_index('ix_subscription_channel', table_name='subscription')
op.drop_index('ix_subscription_role_id', table_name='subscription')
op.drop_table('subscription')
op.drop_table('audit')
op.drop_index('ix_document_record_document_id',
table_name='document_record')
op.drop_index('ix_document_record_index',
table_name='document_record')
op.drop_table('document_record')
op.drop_column('document', 'status')
op.drop_column('document', 'error_message')
op.drop_column('document', 'body_text')
op.drop_column('document', 'body_raw')
def downgrade():
pass
|
<commit_before><commit_msg>Remove document tables and audit table<commit_after>
|
"""Remove document-related tables
Revision ID: af9b37868cf3
Revises: 284a9ec16306
Create Date: 2019-06-13 17:45:43.310462
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'af9b37868cf3'
down_revision = '284a9ec16306'
def upgrade():
op.drop_index('ix_document_tag_document_id', table_name='document_tag')
op.drop_index('ix_document_tag_origin', table_name='document_tag')
op.drop_table('document_tag')
op.drop_table('balkhash_df643aa36820436e85679170e1f6832f')
op.drop_index('ix_subscription_channel', table_name='subscription')
op.drop_index('ix_subscription_role_id', table_name='subscription')
op.drop_table('subscription')
op.drop_table('audit')
op.drop_index('ix_document_record_document_id',
table_name='document_record')
op.drop_index('ix_document_record_index',
table_name='document_record')
op.drop_table('document_record')
op.drop_column('document', 'status')
op.drop_column('document', 'error_message')
op.drop_column('document', 'body_text')
op.drop_column('document', 'body_raw')
def downgrade():
pass
|
Remove document tables and audit table"""Remove document-related tables
Revision ID: af9b37868cf3
Revises: 284a9ec16306
Create Date: 2019-06-13 17:45:43.310462
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'af9b37868cf3'
down_revision = '284a9ec16306'
def upgrade():
op.drop_index('ix_document_tag_document_id', table_name='document_tag')
op.drop_index('ix_document_tag_origin', table_name='document_tag')
op.drop_table('document_tag')
op.drop_table('balkhash_df643aa36820436e85679170e1f6832f')
op.drop_index('ix_subscription_channel', table_name='subscription')
op.drop_index('ix_subscription_role_id', table_name='subscription')
op.drop_table('subscription')
op.drop_table('audit')
op.drop_index('ix_document_record_document_id',
table_name='document_record')
op.drop_index('ix_document_record_index',
table_name='document_record')
op.drop_table('document_record')
op.drop_column('document', 'status')
op.drop_column('document', 'error_message')
op.drop_column('document', 'body_text')
op.drop_column('document', 'body_raw')
def downgrade():
pass
|
<commit_before><commit_msg>Remove document tables and audit table<commit_after>"""Remove document-related tables
Revision ID: af9b37868cf3
Revises: 284a9ec16306
Create Date: 2019-06-13 17:45:43.310462
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'af9b37868cf3'
down_revision = '284a9ec16306'
def upgrade():
op.drop_index('ix_document_tag_document_id', table_name='document_tag')
op.drop_index('ix_document_tag_origin', table_name='document_tag')
op.drop_table('document_tag')
op.drop_table('balkhash_df643aa36820436e85679170e1f6832f')
op.drop_index('ix_subscription_channel', table_name='subscription')
op.drop_index('ix_subscription_role_id', table_name='subscription')
op.drop_table('subscription')
op.drop_table('audit')
op.drop_index('ix_document_record_document_id',
table_name='document_record')
op.drop_index('ix_document_record_index',
table_name='document_record')
op.drop_table('document_record')
op.drop_column('document', 'status')
op.drop_column('document', 'error_message')
op.drop_column('document', 'body_text')
op.drop_column('document', 'body_raw')
def downgrade():
pass
|
|
ec1c315eac4570cea6be7c2c08cefcdbee35d415
|
bin/representation_classes.py
|
bin/representation_classes.py
|
"""representation_classes.py
Compute the representation of the emergent classes in the dataset and the
variance obtained for the null model.
"""
import csv
import marble as mb
#
# Import data
#
## List of MSA
msa = {}
with open('data/names/msa.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
msa[rows[0]] = rows[1]
## Classes
classes = {}
with open('extr/classes/msa_average/classes.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
classes[rows[0]] =[int(r) for r in rows[1:]]
#
# Extract and save the data
#
for j, city in enumerate(msa):
print "Extract the representation of classes for %s (%s/%s)"%(city,
j+1,
len(msa))
## Import category composition
households = {}
with open('data/income/msa/%s/income.csv'%city, 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
households[rows[0]] = {c:int(h) for c,h in enumerate(rows[1:])}
## Compute representation and variance
rep = mb.representation(households, classes)
## Save the values
with open('extr/representation/classes/msa/%s_values.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%val)
output.write('\n')
## Save the variance
with open('extr/representation/classes/msa/%s_variance.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%var)
output.write('\n')
|
Add script to compute the representation of each class in the different areal units
|
Add script to compute the representation of each class in the different areal units
|
Python
|
bsd-3-clause
|
rlouf/patterns-of-segregation
|
Add script to compute the representation of each class in the different areal units
|
"""representation_classes.py
Compute the representation of the emergent classes in the dataset and the
variance obtained for the null model.
"""
import csv
import marble as mb
#
# Import data
#
## List of MSA
msa = {}
with open('data/names/msa.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
msa[rows[0]] = rows[1]
## Classes
classes = {}
with open('extr/classes/msa_average/classes.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
classes[rows[0]] =[int(r) for r in rows[1:]]
#
# Extract and save the data
#
for j, city in enumerate(msa):
print "Extract the representation of classes for %s (%s/%s)"%(city,
j+1,
len(msa))
## Import category composition
households = {}
with open('data/income/msa/%s/income.csv'%city, 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
households[rows[0]] = {c:int(h) for c,h in enumerate(rows[1:])}
## Compute representation and variance
rep = mb.representation(households, classes)
## Save the values
with open('extr/representation/classes/msa/%s_values.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%val)
output.write('\n')
## Save the variance
with open('extr/representation/classes/msa/%s_variance.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%var)
output.write('\n')
|
<commit_before><commit_msg>Add script to compute the representation of each class in the different areal units<commit_after>
|
"""representation_classes.py
Compute the representation of the emergent classes in the dataset and the
variance obtained for the null model.
"""
import csv
import marble as mb
#
# Import data
#
## List of MSA
msa = {}
with open('data/names/msa.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
msa[rows[0]] = rows[1]
## Classes
classes = {}
with open('extr/classes/msa_average/classes.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
classes[rows[0]] =[int(r) for r in rows[1:]]
#
# Extract and save the data
#
for j, city in enumerate(msa):
print "Extract the representation of classes for %s (%s/%s)"%(city,
j+1,
len(msa))
## Import category composition
households = {}
with open('data/income/msa/%s/income.csv'%city, 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
households[rows[0]] = {c:int(h) for c,h in enumerate(rows[1:])}
## Compute representation and variance
rep = mb.representation(households, classes)
## Save the values
with open('extr/representation/classes/msa/%s_values.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%val)
output.write('\n')
## Save the variance
with open('extr/representation/classes/msa/%s_variance.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%var)
output.write('\n')
|
Add script to compute the representation of each class in the different areal units"""representation_classes.py
Compute the representation of the emergent classes in the dataset and the
variance obtained for the null model.
"""
import csv
import marble as mb
#
# Import data
#
## List of MSA
msa = {}
with open('data/names/msa.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
msa[rows[0]] = rows[1]
## Classes
classes = {}
with open('extr/classes/msa_average/classes.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
classes[rows[0]] =[int(r) for r in rows[1:]]
#
# Extract and save the data
#
for j, city in enumerate(msa):
print "Extract the representation of classes for %s (%s/%s)"%(city,
j+1,
len(msa))
## Import category composition
households = {}
with open('data/income/msa/%s/income.csv'%city, 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
households[rows[0]] = {c:int(h) for c,h in enumerate(rows[1:])}
## Compute representation and variance
rep = mb.representation(households, classes)
## Save the values
with open('extr/representation/classes/msa/%s_values.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%val)
output.write('\n')
## Save the variance
with open('extr/representation/classes/msa/%s_variance.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%var)
output.write('\n')
|
<commit_before><commit_msg>Add script to compute the representation of each class in the different areal units<commit_after>"""representation_classes.py
Compute the representation of the emergent classes in the dataset and the
variance obtained for the null model.
"""
import csv
import marble as mb
#
# Import data
#
## List of MSA
msa = {}
with open('data/names/msa.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
msa[rows[0]] = rows[1]
## Classes
classes = {}
with open('extr/classes/msa_average/classes.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
classes[rows[0]] =[int(r) for r in rows[1:]]
#
# Extract and save the data
#
for j, city in enumerate(msa):
print "Extract the representation of classes for %s (%s/%s)"%(city,
j+1,
len(msa))
## Import category composition
households = {}
with open('data/income/msa/%s/income.csv'%city, 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
households[rows[0]] = {c:int(h) for c,h in enumerate(rows[1:])}
## Compute representation and variance
rep = mb.representation(households, classes)
## Save the values
with open('extr/representation/classes/msa/%s_values.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%val)
output.write('\n')
## Save the variance
with open('extr/representation/classes/msa/%s_variance.csv'%city, 'w') as output:
output.write('BLOCKGROUP FIP')
for cl in sorted(classes.iterkeys()):
output.write('\t%s'%cl)
output.write('\n')
for bg in rep:
output.write(str(bg))
for cat in sorted(rep[bg].iterkeys()):
val, var = rep[bg][cat]
output.write('\t%s'%var)
output.write('\n')
|
|
01673b2e2d154abc7354881ae92162245f0ac78f
|
pygametemplate/image.py
|
pygametemplate/image.py
|
"""Module containing the Image class for easily adding/removing images from RAM."""
from pygametemplate import load_image
class Image:
def __init__(self, image_name):
self.name = image_name
self.image = None
def load(self):
"""Load the image into RAM."""
self.image = load_image(self.name)
def unload(self):
"""Unload the image from RAM."""
self.image = None
def display(self, surface, coordinates, area=None, special_flags=0):
"""Display the image on the given surface."""
if self.image is None:
self.load()
surface.blit(self.image, coordinates, area, special_flags)
|
Add implementation of pygametemplate.Image class which passes all unit tests :)
|
Add implementation of pygametemplate.Image class which passes all unit tests :)
|
Python
|
mit
|
AndyDeany/pygame-template
|
Add implementation of pygametemplate.Image class which passes all unit tests :)
|
"""Module containing the Image class for easily adding/removing images from RAM."""
from pygametemplate import load_image
class Image:
def __init__(self, image_name):
self.name = image_name
self.image = None
def load(self):
"""Load the image into RAM."""
self.image = load_image(self.name)
def unload(self):
"""Unload the image from RAM."""
self.image = None
def display(self, surface, coordinates, area=None, special_flags=0):
"""Display the image on the given surface."""
if self.image is None:
self.load()
surface.blit(self.image, coordinates, area, special_flags)
|
<commit_before><commit_msg>Add implementation of pygametemplate.Image class which passes all unit tests :)<commit_after>
|
"""Module containing the Image class for easily adding/removing images from RAM."""
from pygametemplate import load_image
class Image:
def __init__(self, image_name):
self.name = image_name
self.image = None
def load(self):
"""Load the image into RAM."""
self.image = load_image(self.name)
def unload(self):
"""Unload the image from RAM."""
self.image = None
def display(self, surface, coordinates, area=None, special_flags=0):
"""Display the image on the given surface."""
if self.image is None:
self.load()
surface.blit(self.image, coordinates, area, special_flags)
|
Add implementation of pygametemplate.Image class which passes all unit tests :)"""Module containing the Image class for easily adding/removing images from RAM."""
from pygametemplate import load_image
class Image:
def __init__(self, image_name):
self.name = image_name
self.image = None
def load(self):
"""Load the image into RAM."""
self.image = load_image(self.name)
def unload(self):
"""Unload the image from RAM."""
self.image = None
def display(self, surface, coordinates, area=None, special_flags=0):
"""Display the image on the given surface."""
if self.image is None:
self.load()
surface.blit(self.image, coordinates, area, special_flags)
|
<commit_before><commit_msg>Add implementation of pygametemplate.Image class which passes all unit tests :)<commit_after>"""Module containing the Image class for easily adding/removing images from RAM."""
from pygametemplate import load_image
class Image:
def __init__(self, image_name):
self.name = image_name
self.image = None
def load(self):
"""Load the image into RAM."""
self.image = load_image(self.name)
def unload(self):
"""Unload the image from RAM."""
self.image = None
def display(self, surface, coordinates, area=None, special_flags=0):
"""Display the image on the given surface."""
if self.image is None:
self.load()
surface.blit(self.image, coordinates, area, special_flags)
|
|
507565fd461fd34f27a44d179a69648b775d9e2a
|
euxfel_h5tools/h5index.py
|
euxfel_h5tools/h5index.py
|
import csv
import h5py
import sys
def hdf5_datasets(grp):
"""Print CSV data of all datasets in an HDF5 file.
path, shape, dtype
"""
writer = csv.writer(sys.stdout)
writer.writerow(['path', 'shape', 'dtype'])
def visitor(path, item):
if isinstance(item, h5py.Dataset):
writer.writerow([path, item.shape, item.dtype.str])
grp.visititems(visitor)
def main():
file = h5py.File(sys.argv[1])
hdf5_datasets(file)
if __name__ == '__main__':
main()
|
Add a utility for indexing structure of HDF5 files
|
Add a utility for indexing structure of HDF5 files
|
Python
|
bsd-3-clause
|
European-XFEL/h5tools-py
|
Add a utility for indexing structure of HDF5 files
|
import csv
import h5py
import sys
def hdf5_datasets(grp):
"""Print CSV data of all datasets in an HDF5 file.
path, shape, dtype
"""
writer = csv.writer(sys.stdout)
writer.writerow(['path', 'shape', 'dtype'])
def visitor(path, item):
if isinstance(item, h5py.Dataset):
writer.writerow([path, item.shape, item.dtype.str])
grp.visititems(visitor)
def main():
file = h5py.File(sys.argv[1])
hdf5_datasets(file)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a utility for indexing structure of HDF5 files<commit_after>
|
import csv
import h5py
import sys
def hdf5_datasets(grp):
"""Print CSV data of all datasets in an HDF5 file.
path, shape, dtype
"""
writer = csv.writer(sys.stdout)
writer.writerow(['path', 'shape', 'dtype'])
def visitor(path, item):
if isinstance(item, h5py.Dataset):
writer.writerow([path, item.shape, item.dtype.str])
grp.visititems(visitor)
def main():
file = h5py.File(sys.argv[1])
hdf5_datasets(file)
if __name__ == '__main__':
main()
|
Add a utility for indexing structure of HDF5 filesimport csv
import h5py
import sys
def hdf5_datasets(grp):
"""Print CSV data of all datasets in an HDF5 file.
path, shape, dtype
"""
writer = csv.writer(sys.stdout)
writer.writerow(['path', 'shape', 'dtype'])
def visitor(path, item):
if isinstance(item, h5py.Dataset):
writer.writerow([path, item.shape, item.dtype.str])
grp.visititems(visitor)
def main():
file = h5py.File(sys.argv[1])
hdf5_datasets(file)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a utility for indexing structure of HDF5 files<commit_after>import csv
import h5py
import sys
def hdf5_datasets(grp):
"""Print CSV data of all datasets in an HDF5 file.
path, shape, dtype
"""
writer = csv.writer(sys.stdout)
writer.writerow(['path', 'shape', 'dtype'])
def visitor(path, item):
if isinstance(item, h5py.Dataset):
writer.writerow([path, item.shape, item.dtype.str])
grp.visititems(visitor)
def main():
file = h5py.File(sys.argv[1])
hdf5_datasets(file)
if __name__ == '__main__':
main()
|
|
a782f9b02a9e6dd1e72076605761759c883220e0
|
convert_sorted_array_to_binary_search_tree.py
|
convert_sorted_array_to_binary_search_tree.py
|
# coding: utf-8
# author: Fei Gao <leetcode.com@feigao.xyz>
# Problem: convert sorted array to binary search tree
#
# Given an array where elements are sorted in ascending order,
# convert it to a height balanced BST.
#
# Subscribe to see which companies asked this question
#
# Show Tags
#
# Tree
# Depth-first Search
#
# Show Similar Problems
#
# (M) Convert Sorted List to Binary Search Tree
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def __str__(self):
return '[{}, {}, {}]'.format(self.val, str(self.left), str(self.right))
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
n = len(nums)
if n < 1: return None
lo, hi = 0, n-1
mi = (lo + hi + 1) // 2
root = TreeNode(nums[mi])
if mi - lo > 0:
root.left = self.sortedArrayToBST(nums[lo:mi])
if hi - mi > 0:
root.right = self.sortedArrayToBST(nums[mi+1:hi+1])
return root
def main():
solver = Solution()
tests = [
((range(1),), None),
((range(2),), None),
((range(3),), None),
((range(5),), None),
((range(15),), None),
]
for params, expect in tests:
print('-' * 5 + 'TEST' + '-' * 5)
print('Input: ' + str(params))
print('Expect: ' + str(expect))
result = solver.sortedArrayToBST(*params)
print('Result: ' + str(result))
pass
if __name__ == '__main__':
main()
pass
|
Convert Sorted Array to Binary Search Tree
|
Convert Sorted Array to Binary Search Tree
|
Python
|
mit
|
feigaochn/leetcode
|
Convert Sorted Array to Binary Search Tree
|
# coding: utf-8
# author: Fei Gao <leetcode.com@feigao.xyz>
# Problem: convert sorted array to binary search tree
#
# Given an array where elements are sorted in ascending order,
# convert it to a height balanced BST.
#
# Subscribe to see which companies asked this question
#
# Show Tags
#
# Tree
# Depth-first Search
#
# Show Similar Problems
#
# (M) Convert Sorted List to Binary Search Tree
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def __str__(self):
return '[{}, {}, {}]'.format(self.val, str(self.left), str(self.right))
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
n = len(nums)
if n < 1: return None
lo, hi = 0, n-1
mi = (lo + hi + 1) // 2
root = TreeNode(nums[mi])
if mi - lo > 0:
root.left = self.sortedArrayToBST(nums[lo:mi])
if hi - mi > 0:
root.right = self.sortedArrayToBST(nums[mi+1:hi+1])
return root
def main():
solver = Solution()
tests = [
((range(1),), None),
((range(2),), None),
((range(3),), None),
((range(5),), None),
((range(15),), None),
]
for params, expect in tests:
print('-' * 5 + 'TEST' + '-' * 5)
print('Input: ' + str(params))
print('Expect: ' + str(expect))
result = solver.sortedArrayToBST(*params)
print('Result: ' + str(result))
pass
if __name__ == '__main__':
main()
pass
|
<commit_before><commit_msg>Convert Sorted Array to Binary Search Tree<commit_after>
|
# coding: utf-8
# author: Fei Gao <leetcode.com@feigao.xyz>
# Problem: convert sorted array to binary search tree
#
# Given an array where elements are sorted in ascending order,
# convert it to a height balanced BST.
#
# Subscribe to see which companies asked this question
#
# Show Tags
#
# Tree
# Depth-first Search
#
# Show Similar Problems
#
# (M) Convert Sorted List to Binary Search Tree
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def __str__(self):
return '[{}, {}, {}]'.format(self.val, str(self.left), str(self.right))
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
n = len(nums)
if n < 1: return None
lo, hi = 0, n-1
mi = (lo + hi + 1) // 2
root = TreeNode(nums[mi])
if mi - lo > 0:
root.left = self.sortedArrayToBST(nums[lo:mi])
if hi - mi > 0:
root.right = self.sortedArrayToBST(nums[mi+1:hi+1])
return root
def main():
solver = Solution()
tests = [
((range(1),), None),
((range(2),), None),
((range(3),), None),
((range(5),), None),
((range(15),), None),
]
for params, expect in tests:
print('-' * 5 + 'TEST' + '-' * 5)
print('Input: ' + str(params))
print('Expect: ' + str(expect))
result = solver.sortedArrayToBST(*params)
print('Result: ' + str(result))
pass
if __name__ == '__main__':
main()
pass
|
Convert Sorted Array to Binary Search Tree# coding: utf-8
# author: Fei Gao <leetcode.com@feigao.xyz>
# Problem: convert sorted array to binary search tree
#
# Given an array where elements are sorted in ascending order,
# convert it to a height balanced BST.
#
# Subscribe to see which companies asked this question
#
# Show Tags
#
# Tree
# Depth-first Search
#
# Show Similar Problems
#
# (M) Convert Sorted List to Binary Search Tree
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def __str__(self):
return '[{}, {}, {}]'.format(self.val, str(self.left), str(self.right))
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
n = len(nums)
if n < 1: return None
lo, hi = 0, n-1
mi = (lo + hi + 1) // 2
root = TreeNode(nums[mi])
if mi - lo > 0:
root.left = self.sortedArrayToBST(nums[lo:mi])
if hi - mi > 0:
root.right = self.sortedArrayToBST(nums[mi+1:hi+1])
return root
def main():
solver = Solution()
tests = [
((range(1),), None),
((range(2),), None),
((range(3),), None),
((range(5),), None),
((range(15),), None),
]
for params, expect in tests:
print('-' * 5 + 'TEST' + '-' * 5)
print('Input: ' + str(params))
print('Expect: ' + str(expect))
result = solver.sortedArrayToBST(*params)
print('Result: ' + str(result))
pass
if __name__ == '__main__':
main()
pass
|
<commit_before><commit_msg>Convert Sorted Array to Binary Search Tree<commit_after># coding: utf-8
# author: Fei Gao <leetcode.com@feigao.xyz>
# Problem: convert sorted array to binary search tree
#
# Given an array where elements are sorted in ascending order,
# convert it to a height balanced BST.
#
# Subscribe to see which companies asked this question
#
# Show Tags
#
# Tree
# Depth-first Search
#
# Show Similar Problems
#
# (M) Convert Sorted List to Binary Search Tree
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
def __str__(self):
return '[{}, {}, {}]'.format(self.val, str(self.left), str(self.right))
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
n = len(nums)
if n < 1: return None
lo, hi = 0, n-1
mi = (lo + hi + 1) // 2
root = TreeNode(nums[mi])
if mi - lo > 0:
root.left = self.sortedArrayToBST(nums[lo:mi])
if hi - mi > 0:
root.right = self.sortedArrayToBST(nums[mi+1:hi+1])
return root
def main():
solver = Solution()
tests = [
((range(1),), None),
((range(2),), None),
((range(3),), None),
((range(5),), None),
((range(15),), None),
]
for params, expect in tests:
print('-' * 5 + 'TEST' + '-' * 5)
print('Input: ' + str(params))
print('Expect: ' + str(expect))
result = solver.sortedArrayToBST(*params)
print('Result: ' + str(result))
pass
if __name__ == '__main__':
main()
pass
|
|
cf59fde7b54a2e537adad9695f04aeed1712e02d
|
d1lod/tests/test_util.py
|
d1lod/tests/test_util.py
|
"""test_util.py"""
from d1lod import util
def test_can_match_doi_strings():
"""Tests the matching algorithm which takes unstructured strings and guesses
their identifier structure using string-matching.
"""
assert util.getIdentifierScheme("http://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("https://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("doi:10.XX") == "doi"
assert util.getIdentifierScheme("ark://1234") == "ark"
assert util.getIdentifierScheme("somethingsomething") != "doi"
assert util.getIdentifierScheme("somethingsomething") == "local-resource-identifier-scheme"
|
Add basic unit test of getIdentifierScheme
|
Add basic unit test of getIdentifierScheme
|
Python
|
apache-2.0
|
ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod
|
Add basic unit test of getIdentifierScheme
|
"""test_util.py"""
from d1lod import util
def test_can_match_doi_strings():
"""Tests the matching algorithm which takes unstructured strings and guesses
their identifier structure using string-matching.
"""
assert util.getIdentifierScheme("http://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("https://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("doi:10.XX") == "doi"
assert util.getIdentifierScheme("ark://1234") == "ark"
assert util.getIdentifierScheme("somethingsomething") != "doi"
assert util.getIdentifierScheme("somethingsomething") == "local-resource-identifier-scheme"
|
<commit_before><commit_msg>Add basic unit test of getIdentifierScheme<commit_after>
|
"""test_util.py"""
from d1lod import util
def test_can_match_doi_strings():
"""Tests the matching algorithm which takes unstructured strings and guesses
their identifier structure using string-matching.
"""
assert util.getIdentifierScheme("http://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("https://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("doi:10.XX") == "doi"
assert util.getIdentifierScheme("ark://1234") == "ark"
assert util.getIdentifierScheme("somethingsomething") != "doi"
assert util.getIdentifierScheme("somethingsomething") == "local-resource-identifier-scheme"
|
Add basic unit test of getIdentifierScheme"""test_util.py"""
from d1lod import util
def test_can_match_doi_strings():
"""Tests the matching algorithm which takes unstructured strings and guesses
their identifier structure using string-matching.
"""
assert util.getIdentifierScheme("http://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("https://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("doi:10.XX") == "doi"
assert util.getIdentifierScheme("ark://1234") == "ark"
assert util.getIdentifierScheme("somethingsomething") != "doi"
assert util.getIdentifierScheme("somethingsomething") == "local-resource-identifier-scheme"
|
<commit_before><commit_msg>Add basic unit test of getIdentifierScheme<commit_after>"""test_util.py"""
from d1lod import util
def test_can_match_doi_strings():
"""Tests the matching algorithm which takes unstructured strings and guesses
their identifier structure using string-matching.
"""
assert util.getIdentifierScheme("http://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("https://doi.org/10.XX") == "doi"
assert util.getIdentifierScheme("doi:10.XX") == "doi"
assert util.getIdentifierScheme("ark://1234") == "ark"
assert util.getIdentifierScheme("somethingsomething") != "doi"
assert util.getIdentifierScheme("somethingsomething") == "local-resource-identifier-scheme"
|