Dataset columns, with the string-length statistics shown by the dataset viewer:

| field | type |
|---|---|
| commit | string (40 chars) |
| old_file | string (4–118 chars) |
| new_file | string (4–118 chars) |
| old_contents | string (0–2.94k chars) |
| new_contents | string (1–4.43k chars) |
| subject | string (15–444 chars) |
| message | string (16–3.45k chars) |
| lang | string (1 class) |
| license | string (13 classes) |
| repos | string (5–43.2k chars) |
| prompt | string (17–4.58k chars) |
| response | string (1–4.43k chars) |
| prompt_tagged | string (58–4.62k chars) |
| response_tagged | string (1–4.43k chars) |
| text | string (132–7.29k chars) |
| text_tagged | string (173–7.33k chars) |

In the rows shown here, `prompt` repeats `message`, `response` and `response_tagged` repeat `new_contents`, `text` concatenates `message` with `new_contents`, and the tagged variants wrap the same content in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers.
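As a quick orientation to the schema, here is a minimal sketch of streaming and inspecting a dataset with these columns using the Hugging Face `datasets` library; the dataset ID `user/commits-to-code` is a placeholder, since the preview does not name the dataset.

```python
# Minimal sketch, assuming a Hugging Face dataset with the columns listed above.
# NOTE: "user/commits-to-code" is a placeholder ID, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("user/commits-to-code", split="train", streaming=True)

for row in ds.take(3):
    print(row["commit"], row["new_file"], row["license"])
    print(row["subject"])
    # new_contents holds the full post-commit file; show a short preview.
    print(row["new_contents"][:200])
```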
---

commit: 5d98c4109322d3346609fdb1bf44cfd4a16dc94c
old_file: project_management/tests/test_main.py
new_file: project_management/tests/test_main.py
old_contents: (empty)
new_contents:

```python
"""
Test main pm functionality
"""
from cement.utils import test
from pmtools import PmController, PmApp
from test_default import config_defaults, PmTestApp


class PmMainTest(test.CementTestCase):
    app_class = PmTestApp

    def test_1_help(self):
        app = self.make_app(argv=['--help'])
        app.setup()
        print dir(app)
        print app.argv
        print dir(app.config)
        print dir(app._meta)
        print app._meta.label
        print app._meta.base_controller.Meta.label
        app.run()
        app.close()
```

subject: Add tests for main app
message: Add tests for main app
lang: Python
license: mit
repos: SciLifeLab/scilifelab,kate-v-stepanova/scilifelab,senthil10/scilifelab,senthil10/scilifelab,SciLifeLab/scilifelab,jun-wan/scilifelab,SciLifeLab/scilifelab,jun-wan/scilifelab,senthil10/scilifelab,jun-wan/scilifelab,kate-v-stepanova/scilifelab,SciLifeLab/scilifelab,kate-v-stepanova/scilifelab,kate-v-stepanova/scilifelab,jun-wan/scilifelab,senthil10/scilifelab
---

commit: 68b5e3438b771afe4a70019a12fdd6d60a587c7e
old_file: py/1-bit-and-2-bit-characters.py
new_file: py/1-bit-and-2-bit-characters.py
old_contents: (empty)
new_contents:

```python
class Solution(object):
    def isOneBitCharacter(self, bits):
        """
        :type bits: List[int]
        :rtype: bool
        """
        s0, s1 = True, False
        for i, b in enumerate(bits[:-1]):
            if b == 1:
                s0, s1 = s1 and bits[i - 1] == 1, s0
            else:
                s0, s1 = True, False
        return s0
```

subject: Add py solution for 717. 1-bit and 2-bit Characters
message: Add py solution for 717. 1-bit and 2-bit Characters
  717. 1-bit and 2-bit Characters: https://leetcode.com/problems/1-bit-and-2-bit-characters/
lang: Python
license: apache-2.0
repos: ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
---

commit: 0738a6a4739db2a34659e469b4d262a971bc4938
old_file: heppyplotlib/errorcalc.py
new_file: heppyplotlib/errorcalc.py
old_contents: (empty)
new_contents:

```python
"""Functions for calculating errors by combining different datasets."""
import math


def asymmetric_hessian_error(value_lists):
    """Calculate the asymmetric hessian error from a list of datasets,
    where the first dataset stems from a PDF CV run."""
    lows = []
    highs = []
    transposed_value_lists = zip(*value_lists)
    for values in transposed_value_lists:
        central_value = values[0]
        evs = values[1:]
        error = [0.0, 0.0]
        for i in range(0, len(evs), 2):
            (ev_p, ev_m) = evs[i:i+2]
            error[0] += max(ev_p - central_value, ev_m - central_value, 0)**2
            error[1] += max(central_value - ev_p, central_value - ev_m, 0)**2
        high = central_value + math.sqrt(error[0])
        low = central_value - math.sqrt(error[1])
        highs.append(high)
        lows.append(low)
    return (lows, highs)
```

subject: Add a calc function for asymmetric hessian errors
message: Add a calc function for asymmetric hessian errors
lang: Python
license: mit
repos: ebothmann/heppyplotlib
---

commit: aeb657dae9decb0ea25361e530fdbe0399a1650e
old_file: tests/test_validators.py
new_file: tests/test_validators.py
old_contents: (empty)
new_contents:

```python
"""
test_validators
~~~~~~~~~~~~~~

Unittests for bundled validators.

:copyright: 2007-2008 by James Crasta, Thomas Johansson.
:license: MIT, see LICENSE.txt for details.
"""
from py.test import raises
from wtforms.validators import ValidationError, length, url, not_empty, email, ip_address


class DummyForm(object):
    pass


class DummyField(object):
    def __init__(self, data):
        self.data = data


form = DummyForm()


def test_email():
    assert email(form, DummyField('foo@bar.dk')) == None
    assert email(form, DummyField('123@bar.dk')) == None
    assert email(form, DummyField('foo@456.dk')) == None
    assert email(form, DummyField('foo@bar456.info')) == None
    raises(ValidationError, email, form, DummyField('foo')) == None
    raises(ValidationError, email, form, DummyField('bar.dk')) == None
    raises(ValidationError, email, form, DummyField('foo@')) == None
    raises(ValidationError, email, form, DummyField('@bar.dk')) == None
    raises(ValidationError, email, form, DummyField('foo@bar')) == None
    raises(ValidationError, email, form, DummyField('foo@bar.ab12')) == None
    raises(ValidationError, email, form, DummyField('foo@bar.abcde')) == None


def test_length():
    field = DummyField('foobar')
    assert length(min=2, max=6)(form, field) == None
    raises(ValidationError, length(min=7), form, field)
    raises(ValidationError, length(max=5), form, field)


def test_url():
    assert url()(form, DummyField('http://foobar.dk')) == None
    assert url()(form, DummyField('http://foobar.dk/')) == None
    assert url()(form, DummyField('http://foobar.dk/foobar')) == None
    raises(ValidationError, url(), form, DummyField('http://foobar'))
    raises(ValidationError, url(), form, DummyField('foobar.dk'))
    raises(ValidationError, url(), form, DummyField('http://foobar.12'))


def test_not_empty():
    assert not_empty()(form, DummyField('foobar')) == None
    raises(ValidationError, not_empty(), form, DummyField(''))
    raises(ValidationError, not_empty(), form, DummyField(' '))


def test_ip_address():
    assert ip_address(form, DummyField('127.0.0.1')) == None
    raises(ValidationError, ip_address, form, DummyField('abc.0.0.1'))
    raises(ValidationError, ip_address, form, DummyField('1278.0.0.1'))
    raises(ValidationError, ip_address, form, DummyField('127.0.0.abc'))
```

subject: Add first basic unittests using py.test
message: Add first basic unittests using py.test
lang: Python
license: bsd-3-clause
repos: mfa/wtforms-clone,maxcountryman/wtforms,mfa/wtforms-clone
---

commit: a205a7032aa72111e1cb44f4989363b4e64be28a
old_file: tools/download-wheels.py
new_file: tools/download-wheels.py
old_contents: (empty)
new_contents:

```python
#!/usr/bin/env python
"""
Download NumPy wheels from Anaconda staging area.
"""
import sys
import os
import re
import shutil
import argparse

import urllib3
from bs4 import BeautifulSoup

__version__ = '0.1'

ANACONDA_INDEX = 'https://anaconda.org/multibuild-wheels-staging/numpy/files'
ANACONDA_FILES = 'https://anaconda.org/multibuild-wheels-staging/numpy/simple'


def get_wheel_names(version):
    """ Get wheel names from Anaconda HTML directory.

    This looks in the Anaconda multibuild-wheels-staging page and
    parses the HTML to get all the wheel names for a release version.

    Parameters
    ----------
    version : str
        The release version. For instance, "1.18.3".

    """
    tmpl = re.compile('^.*numpy-' + version + '.*\.whl$')
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
    indx = http.request('GET', ANACONDA_INDEX)
    soup = BeautifulSoup(indx.data, 'html.parser')
    return soup.findAll(text=tmpl)


def download_wheels(version, wheelhouse):
    """Download release wheels.

    The release wheels for the given NumPy version are downloaded
    into the given directory.

    Parameters
    ----------
    version : str
        The release version. For instance, "1.18.3".
    wheelhouse : str
        Directory in which to download the wheels.

    """
    wheel_names = get_wheel_names(version[0])
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
    for wheel_name in wheel_names:
        wheel_url = os.path.join(ANACONDA_FILES, wheel_name)
        wheel_path = os.path.join(wheelhouse, wheel_name)
        with open(wheel_path, 'wb') as f:
            with http.request('GET', wheel_url, preload_content=False,) as r:
                print(f"Downloading {wheel_name}")
                shutil.copyfileobj(r, f)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "version",
        help="NumPy version to download.")
    parser.add_argument(
        "-w", "--wheelhouse",
        default=os.path.join(os.getcwd(), "release", "installers"),
        help="Directory in which to store downloaded wheels\n"
             "[defaults to <cwd>/release/installers]")
    args = parser.parse_args()

    wheelhouse = os.path.expanduser(args.wheelhouse)
    download_wheels(args.version, wheelhouse)
```

subject: Add tool for downloading release wheels from Anaconda.
message: ENH: Add tool for downloading release wheels from Anaconda.
  This is a simplified version of terryfy::wheel-uploader that has
  two advantages over the original:
  - It works with Anaconda where our wheels are now stored.
  - It is simplified to match the NumPY workflow.
lang: Python
license: bsd-3-clause
repos: mattip/numpy,pdebuyl/numpy,endolith/numpy,abalkin/numpy,mhvk/numpy,mhvk/numpy,charris/numpy,seberg/numpy,pbrod/numpy,mattip/numpy,numpy/numpy,pbrod/numpy,jakirkham/numpy,pdebuyl/numpy,seberg/numpy,pbrod/numpy,jakirkham/numpy,pdebuyl/numpy,simongibbons/numpy,charris/numpy,endolith/numpy,grlee77/numpy,grlee77/numpy,mhvk/numpy,jakirkham/numpy,numpy/numpy,anntzer/numpy,grlee77/numpy,rgommers/numpy,endolith/numpy,grlee77/numpy,madphysicist/numpy,charris/numpy,pbrod/numpy,numpy/numpy,rgommers/numpy,madphysicist/numpy,abalkin/numpy,numpy/numpy,pbrod/numpy,simongibbons/numpy,mattip/numpy,simongibbons/numpy,grlee77/numpy,anntzer/numpy,charris/numpy,simongibbons/numpy,madphysicist/numpy,pdebuyl/numpy,jakirkham/numpy,rgommers/numpy,jakirkham/numpy,rgommers/numpy,mattip/numpy,madphysicist/numpy,simongibbons/numpy,seberg/numpy,endolith/numpy,abalkin/numpy,seberg/numpy,anntzer/numpy,mhvk/numpy,madphysicist/numpy,mhvk/numpy,anntzer/numpy
---

commit: a49ace410d239a0e8ccce8b2780b0ff2eb9e6dfb
old_file: protogeni/test/consoleurl.py
new_file: protogeni/test/consoleurl.py
old_contents: (empty)
new_contents:

```python
#! /usr/bin/env python
#
# Copyright (c) 2008-2014 University of Utah and the Flux Group.
#
# {{{GENIPUBLIC-LICENSE
#
# GENI Public License
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#
# }}}
#
import sys
import pwd
import getopt
import os
import re

ACCEPTSLICENAME = 1

sliver_urn = None

execfile( "test-common.py" )

if len(REQARGS) != 1:
    print >> sys.stderr, "Must provide a sliver urn"
    sys.exit(1)
else:
    sliver_urn = REQARGS[0]
    pass

#
# Get a credential for myself, that allows me to do things at the SA.
#
mycredential = get_self_credential()
print "Got my SA credential. Looking for slice ..."

#
# Lookup slice.
#
myslice = resolve_slice( SLICENAME, mycredential )
print "Found the slice, asking for a credential ..."

#
# Get the slice credential.
#
slicecred = get_slice_credential( myslice, mycredential )
print "Got the slice credential, asking for the sliver console url ..."

#
# Get the console url
#
params = {}
params["slice_urn"] = myslice["urn"]
params["sliver_urn"] = sliver_urn
params["credentials"] = (slicecred,)
rval, response = do_method("cm", "ConsoleURL", params, version="2.0")
if rval:
    Fatal("Could not get the console URL")
    pass
print str(response["value"])
```

subject: Test script to get console url for a sliver.
message: Test script to get console url for a sliver.
lang: Python
license: agpl-3.0
repos: nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome,nmc-probe/emulab-nome
---

commit: 93feee9e6d4221e6c876413055ee8be659ddeb36
old_file: pythran/tests/rosetta/yin_and_yang.py
new_file: pythran/tests/rosetta/yin_and_yang.py
old_contents: (empty)
new_contents:

```python
# from http://rosettacode.org/wiki/Yin_and_yang#Python
import math


def yinyang(n=3):
    radii = [i * n for i in [1, 3, 6]]
    ranges = [list(range(-r, r+1)) for r in radii]
    squares = [[ (x,y) for x in rnge for y in rnge]
               for rnge in ranges]
    circles = [[ (x,y) for x,y in sqrpoints
                 if math.hypot(x,y) <= radius ]
               for sqrpoints, radius in zip(squares, radii)]
    m = {(x,y):' ' for x,y in squares[-1]}
    for x,y in circles[-1]:
        m[x,y] = '*'
    for x,y in circles[-1]:
        if x>0: m[(x,y)] = '.'
    for x,y in circles[-2]:
        m[(x,y+3*n)] = '*'
        m[(x,y-3*n)] = '.'
    for x,y in circles[-3]:
        m[(x,y+3*n)] = '.'
        m[(x,y-3*n)] = '*'
    return '\n'.join(''.join(m[(x,y)] for x in reversed(ranges[-1])) for y in ranges[-1])


def test():
    return yinyang()
```

subject: Handle Y' rosetta code series
message: Handle Y' rosetta code series
  A very beautiful one, that once again raised a lot of errors...
lang: Python
license: bsd-3-clause
repos: pombredanne/pythran,pbrunet/pythran,hainm/pythran,artas360/pythran,pbrunet/pythran,serge-sans-paille/pythran,serge-sans-paille/pythran,pombredanne/pythran,pbrunet/pythran,hainm/pythran,pombredanne/pythran,hainm/pythran,artas360/pythran,artas360/pythran
---

commit: 39bb9d8408cde59525a576119d28399c49e092f9
old_file: mars.py
new_file: mars.py
old_contents: (empty)
new_contents:

```python
import requests

rover_url = 'https://api.nasa.gov/mars-photos/api/v1/rovers/curiosity/photos'
parameters = {'api_key': 'DEMO_KEY', 'sol': '1324'}

response = requests.get(rover_url, params=parameters).json()

print(len(response['photos']))
for photo in response['photos']:
    print(photo['img_src'])
```

subject: Print out image URLs from Mars
message: Print out image URLs from Mars
lang: Python
license: mit
repos: sagnew/PhoningHome
---

commit: a066aba57d828d25a692ae1dda31797ea7046ddf
old_file: test.py
new_file: test.py
old_contents: (empty)
new_contents:

```python
from ringcentral import SDK
from config import USERNAME, EXTENSION, PASSWORD, APP_KEY, APP_SECRET, SERVER

# Before you start
# Rename credentials-sample.ini to credentials.ini
# Edit credentials.ini with information about your app and your creds

sdk = SDK(APP_KEY, APP_SECRET, SERVER)
platform = sdk.platform()
platform.login(USERNAME, EXTENSION, PASSWORD)

res = platform.get('/account/~/extension/~')
print('User loaded ' + res.json().name)
```

subject: Test file to get started with APIs
message: Test file to get started with APIs
lang: Python
license: mit
repos: ringcentral/ringcentral-python
---

commit: f03734d273914ab5ce31cf0e28f86732ddb35b0c
old_file: tests/test_casefold.py
new_file: tests/test_casefold.py
old_contents: (empty)
new_contents:

```python
# -*- coding: utf-8 -*-
from irc3.testing import BotTestCase


class TestCasefold(BotTestCase):

    config = dict(includes=['irc3.plugins.casefold'])

    def test_ascii(self):
        bot = self.callFTU(server_config={'CASEMAPPING': 'ascii'})
        self.assertEquals(bot.casefold('#testchan\\123[]56'),
                          '#testchan\\123[]56')
        self.assertEquals(bot.casefold('#tESt[]chAn'), '#test[]chan')
        self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test\\chan')
        self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test\\chan')

    def test_rfc1459(self):
        bot = self.callFTU(server_config={'CASEMAPPING': 'rfc1459'})
        self.assertEquals(bot.casefold('#testchan\\123[]56'),
                          '#testchan|123{}56')
        self.assertEquals(bot.casefold('#tESt[]chAn'), '#test{}chan')
        self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test|chan')
        self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test|chan')
```

subject: Add testcase for casefold plugin
message: Add testcase for casefold plugin
lang: Python
license: mit
repos: mrhanky17/irc3,mrhanky17/irc3,gawel/irc3
Add testcase for casefold plugin
|
# -*- coding: utf-8 -*-
from irc3.testing import BotTestCase
class TestCasefold(BotTestCase):
config = dict(includes=['irc3.plugins.casefold'])
def test_ascii(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'ascii'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan\\123[]56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test[]chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test\\chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test\\chan')
def test_rfc1459(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'rfc1459'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan|123{}56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test{}chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test|chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test|chan')
|
<commit_before><commit_msg>Add testcase for casefold plugin<commit_after>
|
# -*- coding: utf-8 -*-
from irc3.testing import BotTestCase
class TestCasefold(BotTestCase):
config = dict(includes=['irc3.plugins.casefold'])
def test_ascii(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'ascii'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan\\123[]56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test[]chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test\\chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test\\chan')
def test_rfc1459(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'rfc1459'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan|123{}56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test{}chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test|chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test|chan')
|
Add testcase for casefold plugin
# -*- coding: utf-8 -*-
from irc3.testing import BotTestCase
class TestCasefold(BotTestCase):
config = dict(includes=['irc3.plugins.casefold'])
def test_ascii(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'ascii'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan\\123[]56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test[]chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test\\chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test\\chan')
def test_rfc1459(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'rfc1459'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan|123{}56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test{}chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test|chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test|chan')
|
<commit_before><commit_msg>Add testcase for casefold plugin<commit_after># -*- coding: utf-8 -*-
from irc3.testing import BotTestCase
class TestCasefold(BotTestCase):
config = dict(includes=['irc3.plugins.casefold'])
def test_ascii(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'ascii'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan\\123[]56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test[]chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test\\chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test\\chan')
def test_rfc1459(self):
bot = self.callFTU(server_config={'CASEMAPPING': 'rfc1459'})
self.assertEquals(bot.casefold('#testchan\\123[]56'),
'#testchan|123{}56')
self.assertEquals(bot.casefold('#tESt[]chAn'), '#test{}chan')
self.assertEquals(bot.casefold('#TEsT\\CHaN'), '#test|chan')
self.assertEquals(bot.casefold(u'#TEsT\\CHaN'), u'#test|chan')
|
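For readers unfamiliar with IRC casemapping, here is a minimal sketch of the rfc1459 folding these tests exercise; it illustrates the mapping only and is not irc3's actual implementation:
# rfc1459 casemapping lowercases ASCII and additionally maps []\~ to {}|^,
# which is exactly what the expectations in test_rfc1459 encode.
RFC1459_TABLE = str.maketrans(
    'ABCDEFGHIJKLMNOPQRSTUVWXYZ[]\\~',
    'abcdefghijklmnopqrstuvwxyz{}|^')

def casefold_rfc1459(name):
    return name.translate(RFC1459_TABLE)

assert casefold_rfc1459('#TEsT\\CHaN') == '#test|chan'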
|
980cf50a2bf6c42cbd689c1b7e1abcadd45f0f1e
|
tests/test_frontend.py
|
tests/test_frontend.py
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
from pypuppetdbquery import parse
class TestFrontend(unittest.TestCase):
"""
Test suite for `pypuppetdbquery.parse`.
"""
def _parse(self, s, **kwargs):
return parse(s, parser_opts={
'lex_options': {
'debug': False,
'optimize': False,
},
'yacc_options': {
'debug': False,
'optimize': False,
'write_tables': False,
},
}, **kwargs)
def test_empty_queries(self):
out = self._parse('')
self.assertTrue(out is None)
def test_simple_json(self):
out = self._parse('foo=bar')
expect = json.dumps([
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]])
self.assertEqual(out, expect)
def test_simple_raw(self):
out = self._parse('foo=bar', json=False)
expect = [
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]]
self.assertEqual(out, expect)
|
Add unit tests for pypuppetdbquery.parse()
|
Add unit tests for pypuppetdbquery.parse()
|
Python
|
apache-2.0
|
bootc/pypuppetdbquery
|
Add unit tests for pypuppetdbquery.parse()
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
from pypuppetdbquery import parse
class TestFrontend(unittest.TestCase):
"""
Test suite for `pypuppetdbquery.parse`.
"""
def _parse(self, s, **kwargs):
return parse(s, parser_opts={
'lex_options': {
'debug': False,
'optimize': False,
},
'yacc_options': {
'debug': False,
'optimize': False,
'write_tables': False,
},
}, **kwargs)
def test_empty_queries(self):
out = self._parse('')
self.assertTrue(out is None)
def test_simple_json(self):
out = self._parse('foo=bar')
expect = json.dumps([
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]])
self.assertEqual(out, expect)
def test_simple_raw(self):
out = self._parse('foo=bar', json=False)
expect = [
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]]
self.assertEqual(out, expect)
|
<commit_before><commit_msg>Add unit tests for pypuppetdbquery.parse()<commit_after>
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
from pypuppetdbquery import parse
class TestFrontend(unittest.TestCase):
"""
Test suite for `pypuppetdbquery.parse`.
"""
def _parse(self, s, **kwargs):
return parse(s, parser_opts={
'lex_options': {
'debug': False,
'optimize': False,
},
'yacc_options': {
'debug': False,
'optimize': False,
'write_tables': False,
},
}, **kwargs)
def test_empty_queries(self):
out = self._parse('')
self.assertTrue(out is None)
def test_simple_json(self):
out = self._parse('foo=bar')
expect = json.dumps([
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]])
self.assertEqual(out, expect)
def test_simple_raw(self):
out = self._parse('foo=bar', json=False)
expect = [
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]]
self.assertEqual(out, expect)
|
Add unit tests for pypuppetdbquery.parse()
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
from pypuppetdbquery import parse
class TestFrontend(unittest.TestCase):
"""
Test suite for `pypuppetdbquery.parse`.
"""
def _parse(self, s, **kwargs):
return parse(s, parser_opts={
'lex_options': {
'debug': False,
'optimize': False,
},
'yacc_options': {
'debug': False,
'optimize': False,
'write_tables': False,
},
}, **kwargs)
def test_empty_queries(self):
out = self._parse('')
self.assertTrue(out is None)
def test_simple_json(self):
out = self._parse('foo=bar')
expect = json.dumps([
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]])
self.assertEqual(out, expect)
def test_simple_raw(self):
out = self._parse('foo=bar', json=False)
expect = [
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]]
self.assertEqual(out, expect)
|
<commit_before><commit_msg>Add unit tests for pypuppetdbquery.parse()<commit_after># -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
from pypuppetdbquery import parse
class TestFrontend(unittest.TestCase):
"""
Test suite for `pypuppetdbquery.parse`.
"""
def _parse(self, s, **kwargs):
return parse(s, parser_opts={
'lex_options': {
'debug': False,
'optimize': False,
},
'yacc_options': {
'debug': False,
'optimize': False,
'write_tables': False,
},
}, **kwargs)
def test_empty_queries(self):
out = self._parse('')
self.assertTrue(out is None)
def test_simple_json(self):
out = self._parse('foo=bar')
expect = json.dumps([
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]])
self.assertEqual(out, expect)
def test_simple_raw(self):
out = self._parse('foo=bar', json=False)
expect = [
'in', 'certname',
['extract', 'certname',
['select_fact_contents',
['and',
['=', 'path', ['foo']],
['=', 'value', 'bar']]]]]
self.assertEqual(out, expect)
|
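As a usage sketch outside the test harness (the fact name and value below are arbitrary examples):
from pypuppetdbquery import parse

# Returns a JSON string of the PuppetDB AST by default; pass json=False
# for the raw nested-list form, as the second test above shows.
print(parse('operatingsystem=Debian'))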
|
443456f94a92f1844b99965ffcf3679bd939d42c
|
tests/test_moving_eddies.py
|
tests/test_moving_eddies.py
|
from parcels import Particle, ParticleSet, JITParticle, JITParticleSet
from parcels import NEMOGrid, ParticleFile, AdvectionRK4
from argparse import ArgumentParser
pclasses = {'scipy': (Particle, ParticleSet),
'jit': (JITParticle, JITParticleSet)}
def moving_eddies(grid, npart, mode='jit', verbose=False):
"""Configuration of a particle set that follows two moving eddies
:arg grid: :class NEMOGrid: that defines the flow field
:arg npart: Number of particles to initialise"""
# Determine particle and set classes according to mode
ParticleClass, ParticleSetClass = pclasses[mode]
lon = [3.3, 3.3]
lat = [46., 47.8]
pset = ParticleSetClass(2, grid, lon=lon, lat=lat)
if verbose:
print("Initial particle positions:")
for p in pset:
print(p)
out = ParticleFile(name="EddyParticle", particleset=pset)
out.write(pset, 0.)
current = 0.
for _ in range(24 * 25):
pset.execute(AdvectionRK4, time=current,
timesteps=12, dt=300.)
out.write(pset, current)
current += 3600.
if verbose:
print("Final particle positions:")
for p in pset:
print(p)
if __name__ == "__main__":
p = ArgumentParser(description="""
Example of particle advection following two moving eddies""")
p.add_argument('mode', choices=('scipy', 'jit'), nargs='?', default='jit',
help='Execution mode for performing RK4 computation')
p.add_argument('-p', '--particles', type=int, default=20,
help='Number of particles to advect')
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Print particle information before and after execution')
p.add_argument('--profiling', action='store_true', default=False,
help='Print profiling information after run')
args = p.parse_args()
# Open grid file set
grid = NEMOGrid.from_file('moving_eddies')
if args.profiling:
from cProfile import runctx
from pstats import Stats
runctx("moving_eddies(grid, args.particles, mode=args.mode, \
verbose=args.verbose)",
globals(), locals(), "Profile.prof")
Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
else:
moving_eddies(grid, args.particles, mode=args.mode, verbose=args.verbose)
|
Add simple test setup that follows two particles
|
MovingEddies: Add simple test setup that follows two particles
|
Python
|
mit
|
OceanPARCELS/parcels,OceanPARCELS/parcels
|
MovingEddies: Add simple test setup that follows two particles
|
from parcels import Particle, ParticleSet, JITParticle, JITParticleSet
from parcels import NEMOGrid, ParticleFile, AdvectionRK4
from argparse import ArgumentParser
pclasses = {'scipy': (Particle, ParticleSet),
'jit': (JITParticle, JITParticleSet)}
def moving_eddies(grid, npart, mode='jit', verbose=False):
"""Configuration of a particle set that follows two moving eddies
:arg grid: :class NEMOGrid: that defines the flow field
:arg npart: Number of particles to initialise"""
# Determine particle and set classes according to mode
ParticleClass, ParticleSetClass = pclasses[mode]
lon = [3.3, 3.3]
lat = [46., 47.8]
pset = ParticleSetClass(2, grid, lon=lon, lat=lat)
if verbose:
print("Initial particle positions:")
for p in pset:
print(p)
out = ParticleFile(name="EddyParticle", particleset=pset)
out.write(pset, 0.)
current = 0.
for _ in range(24 * 25):
pset.execute(AdvectionRK4, time=current,
timesteps=12, dt=300.)
out.write(pset, current)
current += 3600.
if verbose:
print("Final particle positions:")
for p in pset:
print(p)
if __name__ == "__main__":
p = ArgumentParser(description="""
Example of particle advection following two moving eddies""")
p.add_argument('mode', choices=('scipy', 'jit'), nargs='?', default='jit',
help='Execution mode for performing RK4 computation')
p.add_argument('-p', '--particles', type=int, default=20,
help='Number of particles to advect')
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Print particle information before and after execution')
p.add_argument('--profiling', action='store_true', default=False,
help='Print profiling information after run')
args = p.parse_args()
# Open grid file set
grid = NEMOGrid.from_file('moving_eddies')
if args.profiling:
from cProfile import runctx
from pstats import Stats
runctx("moving_eddies(grid, args.particles, mode=args.mode, \
verbose=args.verbose)",
globals(), locals(), "Profile.prof")
Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
else:
moving_eddies(grid, args.particles, mode=args.mode, verbose=args.verbose)
|
<commit_before><commit_msg>MovingEddies: Add simple test setup that follows two particles<commit_after>
|
from parcels import Particle, ParticleSet, JITParticle, JITParticleSet
from parcels import NEMOGrid, ParticleFile, AdvectionRK4
from argparse import ArgumentParser
pclasses = {'scipy': (Particle, ParticleSet),
'jit': (JITParticle, JITParticleSet)}
def moving_eddies(grid, npart, mode='jit', verbose=False):
"""Configuration of a particle set that follows two moving eddies
:arg grid: :class NEMOGrid: that defines the flow field
:arg npart: Number of particles to initialise"""
# Determine particle and set classes according to mode
ParticleClass, ParticleSetClass = pclasses[mode]
lon = [3.3, 3.3]
lat = [46., 47.8]
pset = ParticleSetClass(2, grid, lon=lon, lat=lat)
if verbose:
print("Initial particle positions:")
for p in pset:
print(p)
out = ParticleFile(name="EddyParticle", particleset=pset)
out.write(pset, 0.)
current = 0.
for _ in range(24 * 25):
pset.execute(AdvectionRK4, time=current,
timesteps=12, dt=300.)
out.write(pset, current)
current += 3600.
if verbose:
print("Final particle positions:")
for p in pset:
print(p)
if __name__ == "__main__":
p = ArgumentParser(description="""
Example of particle advection following two moving eddies""")
p.add_argument('mode', choices=('scipy', 'jit'), nargs='?', default='jit',
help='Execution mode for performing RK4 computation')
p.add_argument('-p', '--particles', type=int, default=20,
help='Number of particles to advect')
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Print particle information before and after execution')
p.add_argument('--profiling', action='store_true', default=False,
help='Print profiling information after run')
args = p.parse_args()
# Open grid file set
grid = NEMOGrid.from_file('moving_eddies')
if args.profiling:
from cProfile import runctx
from pstats import Stats
runctx("moving_eddies(grid, args.particles, mode=args.mode, \
verbose=args.verbose)",
globals(), locals(), "Profile.prof")
Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
else:
moving_eddies(grid, args.particles, mode=args.mode, verbose=args.verbose)
|
MovingEddies: Add simple test setup that follows two particles
from parcels import Particle, ParticleSet, JITParticle, JITParticleSet
from parcels import NEMOGrid, ParticleFile, AdvectionRK4
from argparse import ArgumentParser
pclasses = {'scipy': (Particle, ParticleSet),
'jit': (JITParticle, JITParticleSet)}
def moving_eddies(grid, npart, mode='jit', verbose=False):
"""Configuration of a particle set that follows two moving eddies
:arg grid: :class NEMOGrid: that defines the flow field
:arg npart: Number of particles to initialise"""
# Determine particle and set classes according to mode
ParticleClass, ParticleSetClass = pclasses[mode]
lon = [3.3, 3.3]
lat = [46., 47.8]
pset = ParticleSetClass(2, grid, lon=lon, lat=lat)
if verbose:
print("Initial particle positions:")
for p in pset:
print(p)
out = ParticleFile(name="EddyParticle", particleset=pset)
out.write(pset, 0.)
current = 0.
for _ in range(24 * 25):
pset.execute(AdvectionRK4, time=current,
timesteps=12, dt=300.)
out.write(pset, current)
current += 3600.
if verbose:
print("Final particle positions:")
for p in pset:
print(p)
if __name__ == "__main__":
p = ArgumentParser(description="""
Example of particle advection following two moving eddies""")
p.add_argument('mode', choices=('scipy', 'jit'), nargs='?', default='jit',
help='Execution mode for performing RK4 computation')
p.add_argument('-p', '--particles', type=int, default=20,
help='Number of particles to advect')
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Print particle information before and after execution')
p.add_argument('--profiling', action='store_true', default=False,
help='Print profiling information after run')
args = p.parse_args()
# Open grid file set
grid = NEMOGrid.from_file('moving_eddies')
if args.profiling:
from cProfile import runctx
from pstats import Stats
runctx("moving_eddies(grid, args.particles, mode=args.mode, \
verbose=args.verbose)",
globals(), locals(), "Profile.prof")
Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
else:
moving_eddies(grid, args.particles, mode=args.mode, verbose=args.verbose)
|
<commit_before><commit_msg>MovingEddies: Add simple test setup that follows two particles<commit_after>from parcels import Particle, ParticleSet, JITParticle, JITParticleSet
from parcels import NEMOGrid, ParticleFile, AdvectionRK4
from argparse import ArgumentParser
pclasses = {'scipy': (Particle, ParticleSet),
'jit': (JITParticle, JITParticleSet)}
def moving_eddies(grid, npart, mode='jit', verbose=False):
"""Configuration of a particle set that follows two moving eddies
:arg grid: :class NEMOGrid: that defines the flow field
:arg npart: Number of particles to initialise"""
# Determine particle and set classes according to mode
ParticleClass, ParticleSetClass = pclasses[mode]
lon = [3.3, 3.3]
lat = [46., 47.8]
pset = ParticleSetClass(2, grid, lon=lon, lat=lat)
if verbose:
print("Initial particle positions:")
for p in pset:
print(p)
out = ParticleFile(name="EddyParticle", particleset=pset)
out.write(pset, 0.)
current = 0.
for _ in range(24 * 25):
pset.execute(AdvectionRK4, time=current,
timesteps=12, dt=300.)
out.write(pset, current)
current += 3600.
if verbose:
print("Final particle positions:")
for p in pset:
print(p)
if __name__ == "__main__":
p = ArgumentParser(description="""
Example of particle advection following two moving eddies""")
p.add_argument('mode', choices=('scipy', 'jit'), nargs='?', default='jit',
help='Execution mode for performing RK4 computation')
p.add_argument('-p', '--particles', type=int, default=20,
help='Number of particles to advect')
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Print particle information before and after execution')
p.add_argument('--profiling', action='store_true', default=False,
help='Print profiling information after run')
args = p.parse_args()
# Open grid file set
grid = NEMOGrid.from_file('moving_eddies')
if args.profiling:
from cProfile import runctx
from pstats import Stats
runctx("moving_eddies(grid, args.particles, mode=args.mode, \
verbose=args.verbose)",
globals(), locals(), "Profile.prof")
Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
else:
moving_eddies(grid, args.particles, mode=args.mode, verbose=args.verbose)
|
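A quick sanity check of the time stepping used in the loop above (plain arithmetic, no parcels dependency):
# Each pset.execute call advances 12 substeps of 300 s = one hour, and the
# loop runs 24 * 25 = 600 times, so the two eddies are followed for 25 days.
substeps, dt = 12, 300.0
calls = 24 * 25
assert substeps * dt == 3600.0
print(calls * substeps * dt / 86400.0)  # -> 25.0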
|
5735595d4a465b2cc9f52bdd1fc524c0d26d60cb
|
tests/test_task_statuses.py
|
tests/test_task_statuses.py
|
from taiga.requestmaker import RequestMaker, RequestMakerException
from taiga.models.base import InstanceResource, ListResource
from taiga.models import TaskStatus, TaskStatuses
from taiga import TaigaAPI
import taiga.exceptions
import json
import requests
import unittest
from mock import patch
from .tools import create_mock_json
from .tools import MockResponse
class TestTaskStatuses(unittest.TestCase):
@patch('taiga.models.base.ListResource._new_resource')
def test_create_task_status(self, mock_new_resource):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_new_resource.return_value = TaskStatus(rm)
ts = TaskStatuses(rm).create(1, 'TS 1')
mock_new_resource.assert_called_with(
payload={'project': 1, 'name': 'TS 1'}
)
|
Add tests for task statuses
|
Add tests for task statuses
|
Python
|
mit
|
bameda/python-taiga,mlq/python-taiga,jespino/python-taiga,bameda/python-taiga,mlq/python-taiga,erikw/python-taiga,jespino/python-taiga,erikw/python-taiga,nephila/python-taiga
|
Add tests for task statuses
|
from taiga.requestmaker import RequestMaker, RequestMakerException
from taiga.models.base import InstanceResource, ListResource
from taiga.models import TaskStatus, TaskStatuses
from taiga import TaigaAPI
import taiga.exceptions
import json
import requests
import unittest
from mock import patch
from .tools import create_mock_json
from .tools import MockResponse
class TestTaskStatuses(unittest.TestCase):
@patch('taiga.models.base.ListResource._new_resource')
def test_create_task_status(self, mock_new_resource):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_new_resource.return_value = TaskStatus(rm)
ts = TaskStatuses(rm).create(1, 'TS 1')
mock_new_resource.assert_called_with(
payload={'project': 1, 'name': 'TS 1'}
)
|
<commit_before><commit_msg>Add tests for task statuses<commit_after>
|
from taiga.requestmaker import RequestMaker, RequestMakerException
from taiga.models.base import InstanceResource, ListResource
from taiga.models import TaskStatus, TaskStatuses
from taiga import TaigaAPI
import taiga.exceptions
import json
import requests
import unittest
from mock import patch
from .tools import create_mock_json
from .tools import MockResponse
class TestTaskStatuses(unittest.TestCase):
@patch('taiga.models.base.ListResource._new_resource')
def test_create_task_status(self, mock_new_resource):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_new_resource.return_value = TaskStatus(rm)
ts = TaskStatuses(rm).create(1, 'TS 1')
mock_new_resource.assert_called_with(
payload={'project': 1, 'name': 'TS 1'}
)
|
Add tests for task statuses
from taiga.requestmaker import RequestMaker, RequestMakerException
from taiga.models.base import InstanceResource, ListResource
from taiga.models import TaskStatus, TaskStatuses
from taiga import TaigaAPI
import taiga.exceptions
import json
import requests
import unittest
from mock import patch
from .tools import create_mock_json
from .tools import MockResponse
class TestTaskStatuses(unittest.TestCase):
@patch('taiga.models.base.ListResource._new_resource')
def test_create_task_status(self, mock_new_resource):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_new_resource.return_value = TaskStatus(rm)
ts = TaskStatuses(rm).create(1, 'TS 1')
mock_new_resource.assert_called_with(
payload={'project': 1, 'name': 'TS 1'}
)
|
<commit_before><commit_msg>Add tests for task statuses<commit_after>from taiga.requestmaker import RequestMaker, RequestMakerException
from taiga.models.base import InstanceResource, ListResource
from taiga.models import TaskStatus, TaskStatuses
from taiga import TaigaAPI
import taiga.exceptions
import json
import requests
import unittest
from mock import patch
from .tools import create_mock_json
from .tools import MockResponse
class TestTaskStatuses(unittest.TestCase):
@patch('taiga.models.base.ListResource._new_resource')
def test_create_task_status(self, mock_new_resource):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_new_resource.return_value = TaskStatus(rm)
ts = TaskStatuses(rm).create(1, 'TS 1')
mock_new_resource.assert_called_with(
payload={'project': 1, 'name': 'TS 1'}
)
|
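For orientation, the mocked call corresponds roughly to this live usage; the host and token below are placeholders:
from taiga.requestmaker import RequestMaker
from taiga.models import TaskStatuses

# Same call as in the test, but issued against a real Taiga instance.
rm = RequestMaker('/api/v1', 'https://taiga.example.com', 'a-real-token')
ts = TaskStatuses(rm).create(1, 'TS 1')  # project id 1, status name 'TS 1'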
|
96ed9d514541b18419b97d5187237f6fdba7b3c7
|
tohu/v3/dependency_graph.py
|
tohu/v3/dependency_graph.py
|
import logging
import re
from graphviz import Digraph
from IPython.display import SVG
__all__ = ['DependencyGraph']
logger = logging.getLogger('tohu')
class DependencyGraph:
NODE_ATTR = dict(shape='box', style='filled', fillcolor='white')
def __init__(self, *, scale=1.0, name=None, graph_attr=None):
graph_attr = graph_attr or dict()
graph_attr['splines'] = 'ortho'
self.graph = Digraph(name=name, node_attr=self.NODE_ATTR, graph_attr=graph_attr)
self.scale = scale
def __repr__(self):
return f'<DependencyGraph>'
def _repr_svg_(self):
return self.get_svg(scale=self.scale)
def add_node(self, g):
self.graph.node(g.tohu_id, label=f'{g:long}')
def add_edge(self, g1, g2, *, color='/paired3/2', style='solid', constraint=None):
self.graph.edge(g1.tohu_id, g2.tohu_id, color=color, style=style, constraint=constraint)
def add_subgraph(self, sg):
assert isinstance(sg, DependencyGraph)
self.graph.subgraph(sg.graph)
def get_svg(self, scale=1.0):
"""
Return string with an SVG representation of the graph.
"""
svg = self.graph._repr_svg_()
width, height = re.search('svg width="(\d+)pt" height="(\d+)pt"', svg).groups()
width_new = int(scale * int(width))
height_new = int(scale * int(height))
svg_scaled = re.sub('svg width="\d+pt" height="\d+pt"', f'svg width="{width_new}pt" height="{height_new}pt"', svg)
return svg_scaled
def draw(self, scale=1.0):
"""
Convenience method to draw a - possibly scaled - version of the graph in Jupyter notebooks.
Returns an SVG object which is rendered automatically by the notebook.
"""
return SVG(self.get_svg(scale=scale))
|
Add class to support dependency graph visualisation
|
Add class to support dependency graph visualisation
|
Python
|
mit
|
maxalbert/tohu
|
Add class to support dependency graph visualisation
|
import logging
import re
from graphviz import Digraph
from IPython.display import SVG
__all__ = ['DependencyGraph']
logger = logging.getLogger('tohu')
class DependencyGraph:
NODE_ATTR = dict(shape='box', style='filled', fillcolor='white')
def __init__(self, *, scale=1.0, name=None, graph_attr=None):
graph_attr = graph_attr or dict()
graph_attr['splines'] = 'ortho'
self.graph = Digraph(name=name, node_attr=self.NODE_ATTR, graph_attr=graph_attr)
self.scale = scale
def __repr__(self):
return f'<DependencyGraph>'
def _repr_svg_(self):
return self.get_svg(scale=self.scale)
def add_node(self, g):
self.graph.node(g.tohu_id, label=f'{g:long}')
def add_edge(self, g1, g2, *, color='/paired3/2', style='solid', constraint=None):
self.graph.edge(g1.tohu_id, g2.tohu_id, color=color, style=style, constraint=constraint)
def add_subgraph(self, sg):
assert isinstance(sg, DependencyGraph)
self.graph.subgraph(sg.graph)
def get_svg(self, scale=1.0):
"""
Return string with an SVG representation of the graph.
"""
svg = self.graph._repr_svg_()
width, height = re.search('svg width="(\d+)pt" height="(\d+)pt"', svg).groups()
width_new = int(scale * int(width))
height_new = int(scale * int(height))
svg_scaled = re.sub('svg width="\d+pt" height="\d+pt"', f'svg width="{width_new}pt" height="{height_new}pt"', svg)
return svg_scaled
def draw(self, scale=1.0):
"""
Convenience method to draw a - possibly scaled - version of the graph in Jupyter notebooks.
Returns an SVG object which is rendered automatically by the notebook.
"""
return SVG(self.get_svg(scale=scale))
|
<commit_before><commit_msg>Add class to support dependency graph visualisation<commit_after>
|
import logging
import re
from graphviz import Digraph
from IPython.display import SVG
__all__ = ['DependencyGraph']
logger = logging.getLogger('tohu')
class DependencyGraph:
NODE_ATTR = dict(shape='box', style='filled', fillcolor='white')
def __init__(self, *, scale=1.0, name=None, graph_attr=None):
graph_attr = graph_attr or dict()
graph_attr['splines'] = 'ortho'
self.graph = Digraph(name=name, node_attr=self.NODE_ATTR, graph_attr=graph_attr)
self.scale = scale
def __repr__(self):
return f'<DependencyGraph>'
def _repr_svg_(self):
return self.get_svg(scale=self.scale)
def add_node(self, g):
self.graph.node(g.tohu_id, label=f'{g:long}')
def add_edge(self, g1, g2, *, color='/paired3/2', style='solid', constraint=None):
self.graph.edge(g1.tohu_id, g2.tohu_id, color=color, style=style, constraint=constraint)
def add_subgraph(self, sg):
assert isinstance(sg, DependencyGraph)
self.graph.subgraph(sg.graph)
def get_svg(self, scale=1.0):
"""
Return string with an SVG representation of the graph.
"""
svg = self.graph._repr_svg_()
width, height = re.search('svg width="(\d+)pt" height="(\d+)pt"', svg).groups()
width_new = int(scale * int(width))
height_new = int(scale * int(height))
svg_scaled = re.sub('svg width="\d+pt" height="\d+pt"', f'svg width="{width_new}pt" height="{height_new}pt"', svg)
return svg_scaled
def draw(self, scale=1.0):
"""
Convenience method to draw a - possibly scaled - version of the graph in Jupyter notebooks.
Returns an SVG object which is rendered automatically by the notebook.
"""
return SVG(self.get_svg(scale=scale))
|
Add class to support dependency graph visualisation
import logging
import re
from graphviz import Digraph
from IPython.display import SVG
__all__ = ['DependencyGraph']
logger = logging.getLogger('tohu')
class DependencyGraph:
NODE_ATTR = dict(shape='box', style='filled', fillcolor='white')
def __init__(self, *, scale=1.0, name=None, graph_attr=None):
graph_attr = graph_attr or dict()
graph_attr['splines'] = 'ortho'
self.graph = Digraph(name=name, node_attr=self.NODE_ATTR, graph_attr=graph_attr)
self.scale = scale
def __repr__(self):
return f'<DependencyGraph>'
def _repr_svg_(self):
return self.get_svg(scale=self.scale)
def add_node(self, g):
self.graph.node(g.tohu_id, label=f'{g:long}')
def add_edge(self, g1, g2, *, color='/paired3/2', style='solid', constraint=None):
self.graph.edge(g1.tohu_id, g2.tohu_id, color=color, style=style, constraint=constraint)
def add_subgraph(self, sg):
assert isinstance(sg, DependencyGraph)
self.graph.subgraph(sg.graph)
def get_svg(self, scale=1.0):
"""
Return string with an SVG representation of the graph.
"""
svg = self.graph._repr_svg_()
width, height = re.search('svg width="(\d+)pt" height="(\d+)pt"', svg).groups()
width_new = int(scale * int(width))
height_new = int(scale * int(height))
svg_scaled = re.sub('svg width="\d+pt" height="\d+pt"', f'svg width="{width_new}pt" height="{height_new}pt"', svg)
return svg_scaled
def draw(self, scale=1.0):
"""
Convenience method to draw a - possibly scaled - version of the graph in Jupyter notebooks.
Returns an SVG object which is rendered automatically by the notebook.
"""
return SVG(self.get_svg(scale=scale))
|
<commit_before><commit_msg>Add class to support dependency graph visualisation<commit_after>import logging
import re
from graphviz import Digraph
from IPython.display import SVG
__all__ = ['DependencyGraph']
logger = logging.getLogger('tohu')
class DependencyGraph:
NODE_ATTR = dict(shape='box', style='filled', fillcolor='white')
def __init__(self, *, scale=1.0, name=None, graph_attr=None):
graph_attr = graph_attr or dict()
graph_attr['splines'] = 'ortho'
self.graph = Digraph(name=name, node_attr=self.NODE_ATTR, graph_attr=graph_attr)
self.scale = scale
def __repr__(self):
return f'<DependencyGraph>'
def _repr_svg_(self):
return self.get_svg(scale=self.scale)
def add_node(self, g):
self.graph.node(g.tohu_id, label=f'{g:long}')
def add_edge(self, g1, g2, *, color='/paired3/2', style='solid', constraint=None):
self.graph.edge(g1.tohu_id, g2.tohu_id, color=color, style=style, constraint=constraint)
def add_subgraph(self, sg):
assert isinstance(sg, DependencyGraph)
self.graph.subgraph(sg.graph)
def get_svg(self, scale=1.0):
"""
Return string with an SVG representation of the graph.
"""
svg = self.graph._repr_svg_()
width, height = re.search('svg width="(\d+)pt" height="(\d+)pt"', svg).groups()
width_new = int(scale * int(width))
height_new = int(scale * int(height))
svg_scaled = re.sub('svg width="\d+pt" height="\d+pt"', f'svg width="{width_new}pt" height="{height_new}pt"', svg)
return svg_scaled
def draw(self, scale=1.0):
"""
Convenience method to draw a - possibly scaled - version of the graph in Jupyter notebooks.
Returns an SVG object which is rendered automatically by the notebook.
"""
return SVG(self.get_svg(scale=scale))
|
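A minimal usage sketch; FakeGen stands in for a tohu generator, i.e. anything with a tohu_id attribute that supports the 'long' format spec used by add_node:
class FakeGen:
    # Stand-in for a tohu generator: add_node formats it with f'{g:long}'.
    def __init__(self, tohu_id):
        self.tohu_id = tohu_id
    def __format__(self, spec):
        return self.tohu_id

g1, g2 = FakeGen('g1'), FakeGen('g2')
graph = DependencyGraph(scale=0.8)
graph.add_node(g1)
graph.add_node(g2)
graph.add_edge(g1, g2)
graph.draw(scale=0.5)  # returns an IPython SVG object in a notebook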
|
b54f1755d6f792a97eaa06a83cadbb72d045402f
|
testtube/decorators.py
|
testtube/decorators.py
|
class RequireModule(object):
"""Decorator that raises import error if specified module isn't found."""
def __init__(self, module_name):
self.module_name = module_name
def _require_module(self):
try:
__import__(self.module_name)
except ImportError:
raise ImportError(
'%s must be installed to use this helper.' % self.module_name)
def __call__(self, func, *args, **kwargs):
def wrapper(*args, **kwargs):
self._require_module()
return func(*args, **kwargs)
return wrapper
|
Add a RequireModule decorator for use by helpers
|
Add a RequireModule decorator for use by helpers
This eliminates the need for _require in helpers.
|
Python
|
mit
|
blaix/testtube,thomasw/testtube,beck/testtube
|
Add a RequireModule decorator for use by helpers
This eliminates the need for _require in helpers.
|
class RequireModule(object):
"""Decorator that raises import error if specified module isn't found."""
def __init__(self, module_name):
self.module_name = module_name
def _require_module(self):
try:
__import__(self.module_name)
except ImportError:
raise ImportError(
'%s must be installed to use this helper.' % self.module_name)
def __call__(self, func, *args, **kwargs):
def wrapper(*args, **kwargs):
self._require_module()
return func(*args, **kwargs)
return wrapper
|
<commit_before><commit_msg>Add a RequireModule decorator for use by helpers
This eliminates the need for _require in helpers.<commit_after>
|
class RequireModule(object):
"""Decorator that raises import error if specified module isn't found."""
def __init__(self, module_name):
self.module_name = module_name
def _require_module(self):
try:
__import__(self.module_name)
except ImportError:
raise ImportError(
'%s must be installed to use this helper.' % self.module_name)
def __call__(self, func, *args, **kwargs):
def wrapper(*args, **kwargs):
self._require_module()
return func(*args, **kwargs)
return wrapper
|
Add a RequireModule decorator for use by helpers
This eliminates the need for _require in helpers.
class RequireModule(object):
"""Decorator that raises import error if specified module isn't found."""
def __init__(self, module_name):
self.module_name = module_name
def _require_module(self):
try:
__import__(self.module_name)
except ImportError:
raise ImportError(
'%s must be installed to use this helper.' % self.module_name)
def __call__(self, func, *args, **kwargs):
def wrapper(*args, **kwargs):
self._require_module()
return func(*args, **kwargs)
return wrapper
|
<commit_before><commit_msg>Add a RequireModule decorator for use by helpers
This eliminates the need for _require in helpers.<commit_after>class RequireModule(object):
"""Decorator that raises import error if specified module isn't found."""
def __init__(self, module_name):
self.module_name = module_name
def _require_module(self):
try:
__import__(self.module_name)
except ImportError:
raise ImportError(
'%s must be installed to use this helper.' % self.module_name)
def __call__(self, func, *args, **kwargs):
def wrapper(*args, **kwargs):
self._require_module()
return func(*args, **kwargs)
return wrapper
|
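A short usage sketch; the module name and helper below are illustrative, not anything the decorator requires:
@RequireModule('pyflakes')
def check_syntax(paths):
    # The availability check runs on every call, so this body is only
    # reached when pyflakes is importable.
    import pyflakes  # noqa
    print('checking %d paths' % len(paths))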
|
55ee1a6609a5dfd922cce78fd4faeb64cd2d3c7d
|
run.py
|
run.py
|
"""Hacked-together development server for feedreader.
Runs the feedreader server under the /api prefix, serves public/index.html
for URIs that do not contain a dot, and serves everything else from public.
"""
import tornado.ioloop
import tornado.web
import feedreader.main
class PrefixedFallbackHandler(tornado.web.FallbackHandler):
"""FallbackHandler that removes the given prefix from requests."""
def prepare(self):
# hacky way of removing /api/
self.request.uri = self.request.uri[4:]
self.request.path = self.request.path[4:]
super(PrefixedFallbackHandler, self).prepare()
class SingleFileHandler(tornado.web.StaticFileHandler):
"""FileHandler that only reads a single static file."""
@classmethod
def get_absolute_path(cls, root, path):
return tornado.web.StaticFileHandler.get_absolute_path(root,
"index.html")
def main():
feedreader_app = feedreader.main.get_application()
application = tornado.web.Application([
(r"/api/(.*)", PrefixedFallbackHandler, dict(fallback=feedreader_app)),
(r"/(.*\..*)", tornado.web.StaticFileHandler, {"path": "public"}),
(r"/(.*)", SingleFileHandler, {"path": "public"}),
])
application.listen(8080)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
Add new dev server to replace tape
|
Add new dev server to replace tape
|
Python
|
mit
|
tdryer/feeder,tdryer/feeder
|
Add new dev server to replace tape
|
"""Hacked-together development server for feedreader.
Runs the feedreader server under the /api prefix, serves public/index.html
for URIs that do not contain a dot, and serves everything else from public.
"""
import tornado.ioloop
import tornado.web
import feedreader.main
class PrefixedFallbackHandler(tornado.web.FallbackHandler):
"""FallbackHandler that removes the given prefix from requests."""
def prepare(self):
# hacky way of removing /api/
self.request.uri = self.request.uri[4:]
self.request.path = self.request.path[4:]
super(PrefixedFallbackHandler, self).prepare()
class SingleFileHandler(tornado.web.StaticFileHandler):
"""FileHandler that only reads a single static file."""
@classmethod
def get_absolute_path(cls, root, path):
return tornado.web.StaticFileHandler.get_absolute_path(root,
"index.html")
def main():
feedreader_app = feedreader.main.get_application()
application = tornado.web.Application([
(r"/api/(.*)", PrefixedFallbackHandler, dict(fallback=feedreader_app)),
(r"/(.*\..*)", tornado.web.StaticFileHandler, {"path": "public"}),
(r"/(.*)", SingleFileHandler, {"path": "public"}),
])
application.listen(8080)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add new dev server to replace tape<commit_after>
|
"""Hacked-together development server for feedreader.
Runs the feedreader server under the /api prefix, serves public/index.html
for URIs that do not contain a dot, and serves everything else from public.
"""
import tornado.ioloop
import tornado.web
import feedreader.main
class PrefixedFallbackHandler(tornado.web.FallbackHandler):
"""FallbackHandler that removes the given prefix from requests."""
def prepare(self):
# hacky way of removing /api/
self.request.uri = self.request.uri[4:]
self.request.path = self.request.path[4:]
super(PrefixedFallbackHandler, self).prepare()
class SingleFileHandler(tornado.web.StaticFileHandler):
"""FileHandler that only reads a single static file."""
@classmethod
def get_absolute_path(cls, root, path):
return tornado.web.StaticFileHandler.get_absolute_path(root,
"index.html")
def main():
feedreader_app = feedreader.main.get_application()
application = tornado.web.Application([
(r"/api/(.*)", PrefixedFallbackHandler, dict(fallback=feedreader_app)),
(r"/(.*\..*)", tornado.web.StaticFileHandler, {"path": "public"}),
(r"/(.*)", SingleFileHandler, {"path": "public"}),
])
application.listen(8080)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
Add new dev server to replace tape
"""Hacked-together development server for feedreader.
Runs the feedreader server under the /api prefix, serves public/index.html
for URIs that do not contain a dot, and serves everything else from public.
"""
import tornado.ioloop
import tornado.web
import feedreader.main
class PrefixedFallbackHandler(tornado.web.FallbackHandler):
"""FallbackHandler that removes the given prefix from requests."""
def prepare(self):
# hacky way of removing /api/
self.request.uri = self.request.uri[4:]
self.request.path = self.request.path[4:]
super(PrefixedFallbackHandler, self).prepare()
class SingleFileHandler(tornado.web.StaticFileHandler):
"""FileHandler that only reads a single static file."""
@classmethod
def get_absolute_path(cls, root, path):
return tornado.web.StaticFileHandler.get_absolute_path(root,
"index.html")
def main():
feedreader_app = feedreader.main.get_application()
application = tornado.web.Application([
(r"/api/(.*)", PrefixedFallbackHandler, dict(fallback=feedreader_app)),
(r"/(.*\..*)", tornado.web.StaticFileHandler, {"path": "public"}),
(r"/(.*)", SingleFileHandler, {"path": "public"}),
])
application.listen(8080)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add new dev server to replace tape<commit_after>"""Hacked-together development server for feedreader.
Runs the feedreader server under the /api prefix, serves public/index.html
for URIs that do not contain a dot, and serves everything else from public.
"""
import tornado.ioloop
import tornado.web
import feedreader.main
class PrefixedFallbackHandler(tornado.web.FallbackHandler):
"""FallbackHandler that removes the given prefix from requests."""
def prepare(self):
# hacky way of removing /api/
self.request.uri = self.request.uri[4:]
self.request.path = self.request.path[4:]
super(PrefixedFallbackHandler, self).prepare()
class SingleFileHandler(tornado.web.StaticFileHandler):
"""FileHandler that only reads a single static file."""
@classmethod
def get_absolute_path(cls, root, path):
return tornado.web.StaticFileHandler.get_absolute_path(root,
"index.html")
def main():
feedreader_app = feedreader.main.get_application()
application = tornado.web.Application([
(r"/api/(.*)", PrefixedFallbackHandler, dict(fallback=feedreader_app)),
(r"/(.*\..*)", tornado.web.StaticFileHandler, {"path": "public"}),
(r"/(.*)", SingleFileHandler, {"path": "public"}),
])
application.listen(8080)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
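A quick illustration of the prefix stripping done in prepare(): dropping the first four characters removes the leading '/api' before the request reaches the wrapped feedreader application:
uri = '/api/feeds/1'
assert uri[4:] == '/feeds/1'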
|
3764459fb9a8995a1476cc7a36c88a98dc9d4685
|
scipy-example/fitsincos.py
|
scipy-example/fitsincos.py
|
""" Given the set of points generated by
f(x) = 0.5 * cos(2 * x) + 2 * sin(0.5 * x) with some noise,
use Levenberg-Marquardt algorithm to find the model of the form
f(x) = a * cos(b * x) + b * sin(a * x) to fit all the points.
"""
import numpy as np
import scipy.optimize as scipy_opt
def sincos_func(x_data, a, b):
""" Computes the function a * sin(b * x) + b * cos(a * x)
Args:
x_data : A Numpy array of input data
a : Real-valued argument of the function
b : Real-valued argument of the function
Returns:
A Numpy array of values of the function a * cos(b * x) + b * sin(a * x)
evaluated at each x in xData
"""
return a * np.cos(b * x_data) + b * np.sin(a * x_data)
def main():
""" Main function to set up data points and calls Scipy curve fitting
routine (whose underlying algorithm is Levenberg-Marquardt)
"""
x_data = np.array([
1.0, 1.5, -1.0, 2.0, 1.8, 2.5, -0.5, -0.8, -1.1, 2.2, 2.6, 2.8, -2.0,
-2.2, -1.7, -1.4, 0.05, 0.0, 1.570796, -1.570796, 0.6, -0.6,
1.67, 2.4, 0.1
])
y_data = np.array([
0.76, 0.860000, -1.18, 1.356, 1.118, 2.039, -0.224, -0.7934, -1.339,
1.63, 2.1613, 2.35, -2.009, -1.936, -1.985, -1.759, 0.55, 0.5, 0.914,
-1.9142, 0.77, -0.4, 1.0, 1.9, 0.59
])
guess_abs = [[0.25, 1.5], [1.7, 3], [10, 5], [0.0, 3.0]]
for guess_ab in guess_abs:
ab, covariance = scipy_opt.curve_fit(
sincos_func, x_data, y_data, guess_ab)
print 'Initial guess: %s' % str(guess_ab)
print 'LM results: %s' % str(ab)
if __name__ == "__main__":
main()
|
Add scipy sin cos example
|
Add scipy sin cos example
|
Python
|
mit
|
truongduy134/levenberg-marquardt,truongduy134/levenberg-marquardt,truongduy134/levenberg-marquardt
|
Add scipy sin cos example
|
""" Given the set of points generated by
f(x) = 0.5 * cos(2 * x) + 2 * sin(0.5 * x) with some noise,
use Levenberg-Marquardt algorithm to find the model of the form
f(x) = a * cos(b * x) + b * sin(a * x) to fit all the points.
"""
import numpy as np
import scipy.optimize as scipy_opt
def sincos_func(x_data, a, b):
""" Computes the function a * sin(b * x) + b * cos(a * x)
Args:
x_data : A Numpy array of input data
a : Real-valued argument of the function
b : Real-valued argument of the function
Returns:
A Numpy array of values of the function a * cos(b * x) + b * sin(a * x)
evaluated at each x in xData
"""
return a * np.cos(b * x_data) + b * np.sin(a * x_data)
def main():
""" Main function to set up data points and calls Scipy curve fitting
routine (whose underlying algorithm is Levenberg-Marquardt)
"""
x_data = np.array([
1.0, 1.5, -1.0, 2.0, 1.8, 2.5, -0.5, -0.8, -1.1, 2.2, 2.6, 2.8, -2.0,
-2.2, -1.7, -1.4, 0.05, 0.0, 1.570796, -1.570796, 0.6, -0.6,
1.67, 2.4, 0.1
])
y_data = np.array([
0.76, 0.860000, -1.18, 1.356, 1.118, 2.039, -0.224, -0.7934, -1.339,
1.63, 2.1613, 2.35, -2.009, -1.936, -1.985, -1.759, 0.55, 0.5, 0.914,
-1.9142, 0.77, -0.4, 1.0, 1.9, 0.59
])
guess_abs = [[0.25, 1.5], [1.7, 3], [10, 5], [0.0, 3.0]]
for guess_ab in guess_abs:
ab, covariance = scipy_opt.curve_fit(
sincos_func, x_data, y_data, guess_ab)
print 'Initial guess: %s' % str(guess_ab)
print 'LM results: %s' % str(ab)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add scipy sin cos example<commit_after>
|
""" Given the set of points generated by
f(x) = 0.5 * cos(2 * x) + 2 * sin(0.5 * x) with some noise,
use Levenberg-Marquardt algorithm to find the model of the form
f(x) = a * cos(b * x) + b * sin(a * x) to fit all the points.
"""
import numpy as np
import scipy.optimize as scipy_opt
def sincos_func(x_data, a, b):
""" Computes the function a * sin(b * x) + b * cos(a * x)
Args:
x_data : A Numpy array of input data
a : Real-valued argument of the function
b : Real-valued argument of the function
Returns:
A Numpy array of values of the function a * cos(b * x) + b * sin(a * x)
evaluated at each x in xData
"""
return a * np.cos(b * x_data) + b * np.sin(a * x_data)
def main():
""" Main function to set up data points and calls Scipy curve fitting
routine (whose underlying algorithm is Levenberg-Marquardt)
"""
x_data = np.array([
1.0, 1.5, -1.0, 2.0, 1.8, 2.5, -0.5, -0.8, -1.1, 2.2, 2.6, 2.8, -2.0,
-2.2, -1.7, -1.4, 0.05, 0.0, 1.570796, -1.570796, 0.6, -0.6,
1.67, 2.4, 0.1
])
y_data = np.array([
0.76, 0.860000, -1.18, 1.356, 1.118, 2.039, -0.224, -0.7934, -1.339,
1.63, 2.1613, 2.35, -2.009, -1.936, -1.985, -1.759, 0.55, 0.5, 0.914,
-1.9142, 0.77, -0.4, 1.0, 1.9, 0.59
])
guess_abs = [[0.25, 1.5], [1.7, 3], [10, 5], [0.0, 3.0]]
for guess_ab in guess_abs:
ab, covariance = scipy_opt.curve_fit(
sincos_func, x_data, y_data, guess_ab)
print 'Initial guess: %s' % str(guess_ab)
print 'LM results: %s' % str(ab)
if __name__ == "__main__":
main()
|
Add scipy sin cos example
""" Given the set of points generated by
f(x) = 0.5 * cos(2 * x) + 2 * sin(0.5 * x) with some noise,
use Levenberg-Marquardt algorithm to find the model of the form
f(x) = a * cos(b * x) + b * sin(a * x) to fit all the points.
"""
import numpy as np
import scipy.optimize as scipy_opt
def sincos_func(x_data, a, b):
""" Computes the function a * sin(b * x) + b * cos(a * x)
Args:
x_data : A Numpy array of input data
a : Real-valued argument of the function
b : Real-valued argument of the function
Returns:
A Numpy array of values of the function a * cos(b * x) + b * sin(a * x)
evaluated at each x in xData
"""
return a * np.cos(b * x_data) + b * np.sin(a * x_data)
def main():
""" Main function to set up data points and calls Scipy curve fitting
routine (whose underlying algorithm is Levenberg-Marquardt)
"""
x_data = np.array([
1.0, 1.5, -1.0, 2.0, 1.8, 2.5, -0.5, -0.8, -1.1, 2.2, 2.6, 2.8, -2.0,
-2.2, -1.7, -1.4, 0.05, 0.0, 1.570796, -1.570796, 0.6, -0.6,
1.67, 2.4, 0.1
])
y_data = np.array([
0.76, 0.860000, -1.18, 1.356, 1.118, 2.039, -0.224, -0.7934, -1.339,
1.63, 2.1613, 2.35, -2.009, -1.936, -1.985, -1.759, 0.55, 0.5, 0.914,
-1.9142, 0.77, -0.4, 1.0, 1.9, 0.59
])
guess_abs = [[0.25, 1.5], [1.7, 3], [10, 5], [0.0, 3.0]]
for guess_ab in guess_abs:
ab, covariance = scipy_opt.curve_fit(
sincos_func, x_data, y_data, guess_ab)
print 'Initial guess: %s' % str(guess_ab)
print 'LM results: %s' % str(ab)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add scipy sin cos example<commit_after>""" Given the set of points generated by
f(x) = 0.5 * cos(2 * x) + 2 * sin(0.5 * x) with some noise,
use Levenberg-Marquardt algorithm to find the model of the form
f(x) = a * cos(b * x) + b * sin(a * x) to fit all the points.
"""
import numpy as np
import scipy.optimize as scipy_opt
def sincos_func(x_data, a, b):
""" Computes the function a * sin(b * x) + b * cos(a * x)
Args:
x_data : A Numpy array of input data
a : Real-valued argument of the function
b : Real-valued argument of the function
Returns:
A Numpy array of values of the function a * cos(b * x) + b * sin(a * x)
evaluated at each x in xData
"""
return a * np.cos(b * x_data) + b * np.sin(a * x_data)
def main():
""" Main function to set up data points and calls Scipy curve fitting
routine (whose underlying algorithm is Levenberg-Marquardt)
"""
x_data = np.array([
1.0, 1.5, -1.0, 2.0, 1.8, 2.5, -0.5, -0.8, -1.1, 2.2, 2.6, 2.8, -2.0,
-2.2, -1.7, -1.4, 0.05, 0.0, 1.570796, -1.570796, 0.6, -0.6,
1.67, 2.4, 0.1
])
y_data = np.array([
0.76, 0.860000, -1.18, 1.356, 1.118, 2.039, -0.224, -0.7934, -1.339,
1.63, 2.1613, 2.35, -2.009, -1.936, -1.985, -1.759, 0.55, 0.5, 0.914,
-1.9142, 0.77, -0.4, 1.0, 1.9, 0.59
])
guess_abs = [[0.25, 1.5], [1.7, 3], [10, 5], [0.0, 3.0]]
for guess_ab in guess_abs:
ab, covariance = scipy_opt.curve_fit(
sincos_func, x_data, y_data, guess_ab)
print 'Initial guess: %s' % str(guess_ab)
print 'LM results: %s' % str(ab)
if __name__ == "__main__":
main()
|
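Levenberg-Marquardt only finds a local minimum, which is why several starting points are tried; per the generating model in the module docstring, a good guess should converge near (a, b) = (0.5, 2.0). A single fit in isolation (Python 3 print syntax, data arrays as defined above):
ab, cov = scipy_opt.curve_fit(sincos_func, x_data, y_data, p0=[0.25, 1.5])
print('LM estimate:', ab)  # expected near [0.5, 2.0]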
|
4091fa177dcaeaac0a36fefee8f5b3cac4c6202b
|
oscar/apps/dashboard/catalogue/fields.py
|
oscar/apps/dashboard/catalogue/fields.py
|
from django import forms
from django.template import Context, Template
class ProductImageMultipleChoiceField(forms.ModelMultipleChoiceField):
"""
Field that renders a ProductImage as a thumbnail and text instead of
just as text.
"""
# Using the low-level Template API instead of storing it in a separate file
# A user might want to override this, so perhaps it should be a 'partial' template
_template = Template("""
{% load thumbnail %}
{% thumbnail image.original "50x50" crop="center" as thumb %}
<img src="{{ thumb.url }}" alt="{{ image }}" /> {{ image.original }}
{% endthumbnail %}
""")
def __init__(self, *args, **kwargs):
if 'widget' not in kwargs:
kwargs['widget'] = forms.CheckboxSelectMultiple()
if 'required' not in kwargs:
kwargs['required'] = False
super(ProductImageMultipleChoiceField, self).__init__(*args, **kwargs)
def label_from_instance(self, obj):
return self._template.render(Context({
'image': obj
}))
|
Add field to render image many-to-many as checkboxes with thumbnails.
|
Add field to render image many-to-many as checkboxes with thumbnails.
|
Python
|
bsd-3-clause
|
sonofatailor/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar
|
Add field to render image many-to-many as checkboxes with thumbnails.
|
from django import forms
from django.template import Context, Template
class ProductImageMultipleChoiceField(forms.ModelMultipleChoiceField):
"""
Field that renders a ProductImage as a thumbnail and text instead of
just as text.
"""
# Using the low-level Template API instead of storing it in a separate file
# A user might want to override this, so perhaps it should be a 'partial' template
_template = Template("""
{% load thumbnail %}
{% thumbnail image.original "50x50" crop="center" as thumb %}
<img src="{{ thumb.url }}" alt="{{ image }}" /> {{ image.original }}
{% endthumbnail %}
""")
def __init__(self, *args, **kwargs):
if 'widget' not in kwargs:
kwargs['widget'] = forms.CheckboxSelectMultiple()
if 'required' not in kwargs:
kwargs['required'] = False
super(ProductImageMultipleChoiceField, self).__init__(*args, **kwargs)
def label_from_instance(self, obj):
return self._template.render(Context({
'image': obj
}))
|
<commit_before><commit_msg>Add field to render image many-to-many as checkboxes with thumbnails.<commit_after>
|
from django import forms
from django.template import Context, Template
class ProductImageMultipleChoiceField(forms.ModelMultipleChoiceField):
"""
Field that renders a ProductImage as a thumbnail and text instead of
just as text.
"""
# Using the low-level Template API instead of storing it in a separate file
# A user might want to override this, so perhaps it should be a 'partial' template
_template = Template("""
{% load thumbnail %}
{% thumbnail image.original "50x50" crop="center" as thumb %}
<img src="{{ thumb.url }}" alt="{{ image }}" /> {{ image.original }}
{% endthumbnail %}
""")
def __init__(self, *args, **kwargs):
if 'widget' not in kwargs:
kwargs['widget'] = forms.CheckboxSelectMultiple()
if 'required' not in kwargs:
kwargs['required'] = False
super(ProductImageMultipleChoiceField, self).__init__(*args, **kwargs)
def label_from_instance(self, obj):
return self._template.render(Context({
'image': obj
}))
|
Add field to render image many-to-many as checkboxes with thumbnails.
from django import forms
from django.template import Context, Template
class ProductImageMultipleChoiceField(forms.ModelMultipleChoiceField):
"""
Field that renders a ProductImage as a thumbnail and text instead of
just as text.
"""
# Using the low-level Template API instead of storing it in a separate file
# A user might want to override this, so perhaps it should be a 'partial' template
_template = Template("""
{% load thumbnail %}
{% thumbnail image.original "50x50" crop="center" as thumb %}
<img src="{{ thumb.url }}" alt="{{ image }}" /> {{ image.original }}
{% endthumbnail %}
""")
def __init__(self, *args, **kwargs):
if 'widget' not in kwargs:
kwargs['widget'] = forms.CheckboxSelectMultiple()
if 'required' not in kwargs:
kwargs['required'] = False
super(ProductImageMultipleChoiceField, self).__init__(*args, **kwargs)
def label_from_instance(self, obj):
return self._template.render(Context({
'image': obj
}))
|
<commit_before><commit_msg>Add field to render image many-to-many as checkboxes with thumbnails.<commit_after>from django import forms
from django.template import Context, Template
class ProductImageMultipleChoiceField(forms.ModelMultipleChoiceField):
"""
Field that renders a ProductImage as a thumbnail and text instead of
just as text.
"""
# Using the low-level Template API instead of storing it in a separate file
# A user might want to override this, so perhaps it should be a 'partial' template
_template = Template("""
{% load thumbnail %}
{% thumbnail image.original "50x50" crop="center" as thumb %}
<img src="{{ thumb.url }}" alt="{{ image }}" /> {{ image.original }}
{% endthumbnail %}
""")
def __init__(self, *args, **kwargs):
if 'widget' not in kwargs:
kwargs['widget'] = forms.CheckboxSelectMultiple()
if 'required' not in kwargs:
kwargs['required'] = False
super(ProductImageMultipleChoiceField, self).__init__(*args, **kwargs)
def label_from_instance(self, obj):
return self._template.render(Context({
'image': obj
}))
|
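A minimal usage sketch of the field above. The model and module names (`Product`, `ProductImage`, `shop.fields`) are assumptions for illustration, not part of the commit:

from django import forms
from shop.models import Product, ProductImage  # hypothetical app layout
from shop.fields import ProductImageMultipleChoiceField  # hypothetical module

class ProductForm(forms.ModelForm):
    # Each related image renders as a checkbox whose label is a 50x50 thumbnail.
    images = ProductImageMultipleChoiceField(queryset=ProductImage.objects.all())

    class Meta:
        model = Product
        fields = ['images']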
|
767f241a930ac0dea5fd3cb7fcb3dc6837d5648b
|
tests/test_cmatrices.py
|
tests/test_cmatrices.py
|
# to run this test, from directory above:
# setenv PYTHONPATH /path/to/pyradiomics/radiomics
# nosetests --nocapture -v tests/test_cmatrices.py
import logging
from nose_parameterized import parameterized
import numpy
import six
from radiomics import cMatsEnabled, getFeatureClasses
from testUtils import custom_name_func, RadiomicsTestUtils
testUtils = RadiomicsTestUtils()
defaultTestCases = testUtils.getTestCases()
testCases = defaultTestCases
featureClasses = getFeatureClasses()
class TestFeatures:
def generate_scenarios():
global testCases, featureClasses
for testCase in testCases:
for className, featureClass in six.iteritems(featureClasses):
assert(featureClass is not None)
if "_calculateCMatrix" in dir(featureClass) or className == "shape":
logging.debug('generate_scenarios: featureClass = %s', className)
yield testCase, className
global testUtils
@parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func)
def test_scenario(self, testCase, featureClassName):
print("")
global testUtils, featureClasses
logging.debug('test_scenario: testCase = %s, featureClassName = %s', testCase, featureClassName)
assert cMatsEnabled()
testUtils.setFeatureClassAndTestCase(featureClassName, testCase)
testImage = testUtils.getImage()
testMask = testUtils.getMask()
featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getKwargs())
if featureClassName == 'shape':
cSA = getattr(featureClass, 'SurfaceArea') # pre-calculated value by C extension
assert (cSA is not None)
pySA = getattr(featureClass, '_calculateSurfaceArea')() # Function, call to calculate SA in full-python mode
assert (pySA is not None)
# Check if the calculated values match
assert (numpy.abs(pySA - cSA)) < 1e-3
else:
assert "_calculateMatrix" in dir(featureClass)
cMat = getattr(featureClass, 'P_%s' % featureClassName) # matrix calculated at initialization by C extension
assert cMat is not None
pyMat = featureClass._calculateMatrix()
assert pyMat is not None
# Check if the calculated arrays match
assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
|
Add testing for C extensions
|
ENH: Add testing for C extensions
Testing compares matrices calculated by python algorithms to
those calculated by the C extension. Tests the calculation of
surface area in a similar manner. Testing fails if C extension
is not available or if any element in the matrix differs more
than 1e-3 from the corresponding element in the other matrix.
|
Python
|
bsd-3-clause
|
Radiomics/pyradiomics,Radiomics/pyradiomics,Radiomics/pyradiomics,Radiomics/pyradiomics
|
ENH: Add testing for C extensions
Testing compares matrices calculated by python algorithms to
those calculated by the C extension. Tests the calculation of
surface area in a similar manner. Testing fails if C extension
is not available or if any element in the matrix differs more
than 1e-3 from the corresponding element in the other matrix.
|
# to run this test, from directory above:
# setenv PYTHONPATH /path/to/pyradiomics/radiomics
# nosetests --nocapture -v tests/test_cmatrices.py
import logging
from nose_parameterized import parameterized
import numpy
import six
from radiomics import cMatsEnabled, getFeatureClasses
from testUtils import custom_name_func, RadiomicsTestUtils
testUtils = RadiomicsTestUtils()
defaultTestCases = testUtils.getTestCases()
testCases = defaultTestCases
featureClasses = getFeatureClasses()
class TestFeatures:
def generate_scenarios():
global testCases, featureClasses
for testCase in testCases:
for className, featureClass in six.iteritems(featureClasses):
assert(featureClass is not None)
if "_calculateCMatrix" in dir(featureClass) or className == "shape":
logging.debug('generate_scenarios: featureClass = %s', className)
yield testCase, className
global testUtils
@parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func)
def test_scenario(self, testCase, featureClassName):
print("")
global testUtils, featureClasses
logging.debug('test_scenario: testCase = %s, featureClassName = %s', testCase, featureClassName)
assert cMatsEnabled()
testUtils.setFeatureClassAndTestCase(featureClassName, testCase)
testImage = testUtils.getImage()
testMask = testUtils.getMask()
featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getKwargs())
if featureClassName == 'shape':
cSA = getattr(featureClass, 'SurfaceArea') # pre-calculated value by C extension
assert (cSA is not None)
pySA = getattr(featureClass, '_calculateSurfaceArea')() # Function, call to calculate SA in full-python mode
assert (pySA is not None)
# Check if the calculated values match
assert (numpy.abs(pySA - cSA)) < 1e-3
else:
assert "_calculateMatrix" in dir(featureClass)
cMat = getattr(featureClass, 'P_%s' % featureClassName) # matrix calculated at initialization by C extension
assert cMat is not None
pyMat = featureClass._calculateMatrix()
assert pyMat is not None
# Check if the calculated arrays match
assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
|
<commit_before><commit_msg>ENH: Add testing for C extensions
Testing compares matrices calculated by python algorithms to
those calculated by the C extension. Tests the calculation of
surface area in a similar manner. Testing fails if C extension
is not available or if any element in the matrix differs more
than 1e-3 from the corresponding element in the other matrix.<commit_after>
|
# to run this test, from directory above:
# setenv PYTHONPATH /path/to/pyradiomics/radiomics
# nosetests --nocapture -v tests/test_cmatrices.py
import logging
from nose_parameterized import parameterized
import numpy
import six
from radiomics import cMatsEnabled, getFeatureClasses
from testUtils import custom_name_func, RadiomicsTestUtils
testUtils = RadiomicsTestUtils()
defaultTestCases = testUtils.getTestCases()
testCases = defaultTestCases
featureClasses = getFeatureClasses()
class TestFeatures:
def generate_scenarios():
global testCases, featureClasses
for testCase in testCases:
for className, featureClass in six.iteritems(featureClasses):
assert(featureClass is not None)
if "_calculateCMatrix" in dir(featureClass) or className == "shape":
logging.debug('generate_scenarios: featureClass = %s', className)
yield testCase, className
global testUtils
@parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func)
def test_scenario(self, testCase, featureClassName):
print("")
global testUtils, featureClasses
logging.debug('test_scenario: testCase = %s, featureClassName = %s', testCase, featureClassName)
assert cMatsEnabled()
testUtils.setFeatureClassAndTestCase(featureClassName, testCase)
testImage = testUtils.getImage()
testMask = testUtils.getMask()
featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getKwargs())
if featureClassName == 'shape':
cSA = getattr(featureClass, 'SurfaceArea') # pre-calculated value by C extension
assert (cSA is not None)
pySA = getattr(featureClass, '_calculateSurfaceArea')() # Function, call to calculate SA in full-python mode
assert (pySA is not None)
# Check if the calculated values match
assert (numpy.abs(pySA - cSA)) < 1e-3
else:
assert "_calculateMatrix" in dir(featureClass)
cMat = getattr(featureClass, 'P_%s' % featureClassName) # matrix calculated at initialization by C extension
assert cMat is not None
pyMat = featureClass._calculateMatrix()
assert pyMat is not None
# Check if the calculated arrays match
assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
|
ENH: Add testing for C extensions
Testing compares matrices calculated by python algorithms to
those calculated by the C extension. Tests the calculation of
surface area in a similar manner. Testing fails if C extension
is not available or if any element in the matrix differs more
than 1e-3 from the corresponding element in the other matrix.# to run this test, from directory above:
# setenv PYTHONPATH /path/to/pyradiomics/radiomics
# nosetests --nocapture -v tests/test_cmatrices.py
import logging
from nose_parameterized import parameterized
import numpy
import six
from radiomics import cMatsEnabled, getFeatureClasses
from testUtils import custom_name_func, RadiomicsTestUtils
testUtils = RadiomicsTestUtils()
defaultTestCases = testUtils.getTestCases()
testCases = defaultTestCases
featureClasses = getFeatureClasses()
class TestFeatures:
def generate_scenarios():
global testCases, featureClasses
for testCase in testCases:
for className, featureClass in six.iteritems(featureClasses):
assert(featureClass is not None)
if "_calculateCMatrix" in dir(featureClass) or className == "shape":
logging.debug('generate_scenarios: featureClass = %s', className)
yield testCase, className
global testUtils
@parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func)
def test_scenario(self, testCase, featureClassName):
print("")
global testUtils, featureClasses
logging.debug('test_scenario: testCase = %s, featureClassName = %s', testCase, featureClassName)
assert cMatsEnabled()
testUtils.setFeatureClassAndTestCase(featureClassName, testCase)
testImage = testUtils.getImage()
testMask = testUtils.getMask()
featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getKwargs())
if featureClassName == 'shape':
cSA = getattr(featureClass, 'SurfaceArea') # pre-calculated value by C extension
assert (cSA is not None)
pySA = getattr(featureClass, '_calculateSurfaceArea')() # Function, call to calculate SA in full-python mode
assert (pySA is not None)
# Check if the calculated values match
assert (numpy.abs(pySA - cSA)) < 1e-3
else:
assert "_calculateMatrix" in dir(featureClass)
cMat = getattr(featureClass, 'P_%s' % featureClassName) # matrix calculated at initialization by C extension
assert cMat is not None
pyMat = featureClass._calculateMatrix()
assert pyMat is not None
# Check if the calculated arrays match
assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
|
<commit_before><commit_msg>ENH: Add testing for C extensions
Testing compares matrices calculated by python algorithms to
those calculated by the C extension. Tests the calculation of
surface area in a similar manner. Testing fails if C extension
is not available or if any element in the matrix differs more
than 1e-3 from the corresponding element in the other matrix.<commit_after># to run this test, from directory above:
# setenv PYTHONPATH /path/to/pyradiomics/radiomics
# nosetests --nocapture -v tests/test_cmatrices.py
import logging
from nose_parameterized import parameterized
import numpy
import six
from radiomics import cMatsEnabled, getFeatureClasses
from testUtils import custom_name_func, RadiomicsTestUtils
testUtils = RadiomicsTestUtils()
defaultTestCases = testUtils.getTestCases()
testCases = defaultTestCases
featureClasses = getFeatureClasses()
class TestFeatures:
def generate_scenarios():
global testCases, featureClasses
for testCase in testCases:
for className, featureClass in six.iteritems(featureClasses):
assert(featureClass is not None)
if "_calculateCMatrix" in dir(featureClass) or className == "shape":
logging.debug('generate_scenarios: featureClass = %s', className)
yield testCase, className
global testUtils
@parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func)
def test_scenario(self, testCase, featureClassName):
print("")
global testUtils, featureClasses
logging.debug('test_scenario: testCase = %s, featureClassName = %s', testCase, featureClassName)
assert cMatsEnabled()
testUtils.setFeatureClassAndTestCase(featureClassName, testCase)
testImage = testUtils.getImage()
testMask = testUtils.getMask()
featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getKwargs())
if featureClassName == 'shape':
cSA = getattr(featureClass, 'SurfaceArea') # pre-calculated value by C extension
assert (cSA is not None)
pySA = getattr(featureClass, '_calculateSurfaceArea')() # Function, call to calculate SA in full-python mode
assert (pySA is not None)
# Check if the calculated values match
assert (numpy.abs(pySA - cSA)) < 1e-3
else:
assert "_calculateMatrix" in dir(featureClass)
cMat = getattr(featureClass, 'P_%s' % featureClassName) # matrix calculated at initialization by C extension
assert cMat is not None
pyMat = featureClass._calculateMatrix()
assert pyMat is not None
# Check if the calculated arrays match
assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
|
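The pass/fail criterion described in the message above, isolated as a sketch with dummy arrays (values illustrative):

import numpy

py_mat = numpy.array([[1.0, 2.0], [3.0, 4.0]])    # full-Python result
c_mat = numpy.array([[1.0, 2.0], [3.0, 4.0005]])  # C-extension result
# Every element must agree to within the 1e-3 tolerance the tests use.
assert numpy.max(numpy.abs(py_mat - c_mat)) < 1e-3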
|
c49f193c1ad516bfec52eab64c7dc2508d35f53d
|
app/grandchallenge/retina_core/migrations/0002_auto_20190225_1228.py
|
app/grandchallenge/retina_core/migrations/0002_auto_20190225_1228.py
|
# Generated by Django 2.1.7 on 2019-02-25 12:28
from django.db import migrations
from django.contrib.auth.models import User, Group
from django.conf import settings
from guardian.shortcuts import assign_perm, remove_perm
# Permission types
PERMISSION_TYPES = ("view", "add", "change", "delete")
# Existing annotation (name, codename) as of annotations.0001_initial
ANNOTATION_CODENAMES = (
("BooleanClassificationAnnotation", "booleanclassificationannotation"),
("CoordinateListAnnotation", "coordinatelistannotation"),
("IntegerClassificationAnnotation", "integerclassificationannotation"),
("LandmarkAnnotationSet", "landmarkannotationset"),
("MeasurementAnnotation", "measurementannotation"),
("PolygonAnnotationSet", "polygonannotationset"),
("SingleLandmarkAnnotation", "singlelandmarkannotation"),
("SinglePolygonAnnotation", "singlepolygonannotation"),
("ETDRSGridAnnotation", "etdrsgridannotation"),
)
def change_retina_permissions(apps, change_permission_func):
retina_admins = Group.objects.get(name=settings.RETINA_ADMINS_GROUP_NAME)
for (annotation_name, annotation_codename) in ANNOTATION_CODENAMES:
# Change user level object permissions to owners of annotations
Annotation = apps.get_model('annotations', annotation_name)
for annotation in Annotation.objects.all():
if annotation_name.startswith('Single'):
owner = annotation.annotation_set.grader
else:
owner = annotation.grader
if owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
owner,
annotation
)
# Change group level permissions
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
retina_admins,
)
def set_retina_permissions_forward(apps, schema_editor):
change_retina_permissions(apps, assign_perm)
def set_retina_permissions_backward(apps, schema_editor):
change_retina_permissions(apps, remove_perm)
class Migration(migrations.Migration):
dependencies = [
("retina_core", "0001_initial"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("annotations", "0001_initial"),
]
operations = [
migrations.RunPython(
set_retina_permissions_forward, set_retina_permissions_backward
)
]
|
Add migration for setting retina annotation permissions
|
Add migration for setting retina annotation permissions
|
Python
|
apache-2.0
|
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
|
Add migration for setting retina annotation permissions
|
# Generated by Django 2.1.7 on 2019-02-25 12:28
from django.db import migrations
from django.contrib.auth.models import User, Group
from django.conf import settings
from guardian.shortcuts import assign_perm, remove_perm
# Permission types
PERMISSION_TYPES = ("view", "add", "change", "delete")
# Existing annotation (name, codename) as of annotations.0001_initial
ANNOTATION_CODENAMES = (
("BooleanClassificationAnnotation", "booleanclassificationannotation"),
("CoordinateListAnnotation", "coordinatelistannotation"),
("IntegerClassificationAnnotation", "integerclassificationannotation"),
("LandmarkAnnotationSet", "landmarkannotationset"),
("MeasurementAnnotation", "measurementannotation"),
("PolygonAnnotationSet", "polygonannotationset"),
("SingleLandmarkAnnotation", "singlelandmarkannotation"),
("SinglePolygonAnnotation", "singlepolygonannotation"),
("ETDRSGridAnnotation", "etdrsgridannotation"),
)
def change_retina_permissions(apps, change_permission_func):
retina_admins = Group.objects.get(name=settings.RETINA_ADMINS_GROUP_NAME)
for (annotation_name, annotation_codename) in ANNOTATION_CODENAMES:
# Change user level object permissions to owners of annotations
Annotation = apps.get_model('annotations', annotation_name)
for annotation in Annotation.objects.all():
if annotation_name.startswith('Single'):
owner = annotation.annotation_set.grader
else:
owner = annotation.grader
if owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
owner,
annotation
)
# Change group level permissions
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
retina_admins,
)
def set_retina_permissions_forward(apps, schema_editor):
change_retina_permissions(apps, assign_perm)
def set_retina_permissions_backward(apps, schema_editor):
change_retina_permissions(apps, remove_perm)
class Migration(migrations.Migration):
dependencies = [
("retina_core", "0001_initial"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("annotations", "0001_initial"),
]
operations = [
migrations.RunPython(
set_retina_permissions_forward, set_retina_permissions_backward
)
]
|
<commit_before><commit_msg>Add migration for setting retina annotation permissions<commit_after>
|
# Generated by Django 2.1.7 on 2019-02-25 12:28
from django.db import migrations
from django.contrib.auth.models import User, Group
from django.conf import settings
from guardian.shortcuts import assign_perm, remove_perm
# Permission types
PERMISSION_TYPES = ("view", "add", "change", "delete")
# Existing annotation (name, codename) as of annotations.0001_initial
ANNOTATION_CODENAMES = (
("BooleanClassificationAnnotation", "booleanclassificationannotation"),
("CoordinateListAnnotation", "coordinatelistannotation"),
("IntegerClassificationAnnotation", "integerclassificationannotation"),
("LandmarkAnnotationSet", "landmarkannotationset"),
("MeasurementAnnotation", "measurementannotation"),
("PolygonAnnotationSet", "polygonannotationset"),
("SingleLandmarkAnnotation", "singlelandmarkannotation"),
("SinglePolygonAnnotation", "singlepolygonannotation"),
("ETDRSGridAnnotation", "etdrsgridannotation"),
)
def change_retina_permissions(apps, change_permission_func):
retina_admins = Group.objects.get(name=settings.RETINA_ADMINS_GROUP_NAME)
for (annotation_name, annotation_codename) in ANNOTATION_CODENAMES:
# Change user level object permissions to owners of annotations
Annotation = apps.get_model('annotations', annotation_name)
for annotation in Annotation.objects.all():
if annotation_name.startswith('Single'):
owner = annotation.annotation_set.grader
else:
owner = annotation.grader
if owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
owner,
annotation
)
# Change group level permissions
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
retina_admins,
)
def set_retina_permissions_forward(apps, schema_editor):
change_retina_permissions(apps, assign_perm)
def set_retina_permissions_backward(apps, schema_editor):
change_retina_permissions(apps, remove_perm)
class Migration(migrations.Migration):
dependencies = [
("retina_core", "0001_initial"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("annotations", "0001_initial"),
]
operations = [
migrations.RunPython(
set_retina_permissions_forward, set_retina_permissions_backward
)
]
|
Add migration for setting retina annotation permissions# Generated by Django 2.1.7 on 2019-02-25 12:28
from django.db import migrations
from django.contrib.auth.models import User, Group
from django.conf import settings
from guardian.shortcuts import assign_perm, remove_perm
# Permission types
PERMISSION_TYPES = ("view", "add", "change", "delete")
# Existing annotation (name, codename) as of annotations.0001_initial
ANNOTATION_CODENAMES = (
("BooleanClassificationAnnotation", "booleanclassificationannotation"),
("CoordinateListAnnotation", "coordinatelistannotation"),
("IntegerClassificationAnnotation", "integerclassificationannotation"),
("LandmarkAnnotationSet", "landmarkannotationset"),
("MeasurementAnnotation", "measurementannotation"),
("PolygonAnnotationSet", "polygonannotationset"),
("SingleLandmarkAnnotation", "singlelandmarkannotation"),
("SinglePolygonAnnotation", "singlepolygonannotation"),
("ETDRSGridAnnotation", "etdrsgridannotation"),
)
def change_retina_permissions(apps, change_permission_func):
retina_admins = Group.objects.get(name=settings.RETINA_ADMINS_GROUP_NAME)
for (annotation_name, annotation_codename) in ANNOTATION_CODENAMES:
# Change user level object permissions to owners of annotations
Annotation = apps.get_model('annotations', annotation_name)
for annotation in Annotation.objects.all():
if annotation_name.startswith('Single'):
owner = annotation.annotation_set.grader
else:
owner = annotation.grader
if owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
owner,
annotation
)
# Change group level permissions
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
retina_admins,
)
def set_retina_permissions_forward(apps, schema_editor):
change_retina_permissions(apps, assign_perm)
def set_retina_permissions_backward(apps, schema_editor):
change_retina_permissions(apps, remove_perm)
class Migration(migrations.Migration):
dependencies = [
("retina_core", "0001_initial"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("annotations", "0001_initial"),
]
operations = [
migrations.RunPython(
set_retina_permissions_forward, set_retina_permissions_backward
)
]
|
<commit_before><commit_msg>Add migration for setting retina annotation permissions<commit_after># Generated by Django 2.1.7 on 2019-02-25 12:28
from django.db import migrations
from django.contrib.auth.models import User, Group
from django.conf import settings
from guardian.shortcuts import assign_perm, remove_perm
# Permission types
PERMISSION_TYPES = ("view", "add", "change", "delete")
# Existing annotation (name, codename) as of annotations.0001_initial
ANNOTATION_CODENAMES = (
("BooleanClassificationAnnotation", "booleanclassificationannotation"),
("CoordinateListAnnotation", "coordinatelistannotation"),
("IntegerClassificationAnnotation", "integerclassificationannotation"),
("LandmarkAnnotationSet", "landmarkannotationset"),
("MeasurementAnnotation", "measurementannotation"),
("PolygonAnnotationSet", "polygonannotationset"),
("SingleLandmarkAnnotation", "singlelandmarkannotation"),
("SinglePolygonAnnotation", "singlepolygonannotation"),
("ETDRSGridAnnotation", "etdrsgridannotation"),
)
def change_retina_permissions(apps, change_permission_func):
retina_admins = Group.objects.get(name=settings.RETINA_ADMINS_GROUP_NAME)
for (annotation_name, annotation_codename) in ANNOTATION_CODENAMES:
# Change user level object permissions to owners of annotations
Annotation = apps.get_model('annotations', annotation_name)
for annotation in Annotation.objects.all():
if annotation_name.startswith('Single'):
owner = annotation.annotation_set.grader
else:
owner = annotation.grader
if owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
owner,
annotation
)
# Change group level permissions
for permission_type in PERMISSION_TYPES:
change_permission_func(
f"annotations.{permission_type}_{annotation_codename}",
retina_admins,
)
def set_retina_permissions_forward(apps, schema_editor):
change_retina_permissions(apps, assign_perm)
def set_retina_permissions_backward(apps, schema_editor):
change_retina_permissions(apps, remove_perm)
class Migration(migrations.Migration):
dependencies = [
("retina_core", "0001_initial"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("annotations", "0001_initial"),
]
operations = [
migrations.RunPython(
set_retina_permissions_forward, set_retina_permissions_backward
)
]
|
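For reference, `guardian.shortcuts.assign_perm` does two different jobs in the migration above: with an object it grants an object-level permission, and without one it grants a model-level permission. A condensed sketch (variable names illustrative):

from guardian.shortcuts import assign_perm

# Object-level: only this annotation instance becomes viewable by its owner.
assign_perm('annotations.view_polygonannotationset', owner, annotation)
# Model-level: the admins group may view every PolygonAnnotationSet.
assign_perm('annotations.view_polygonannotationset', retina_admins)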
|
2a25e583933325c032c53e885b6dcac04e8061bb
|
tests/queue/test_file.py
|
tests/queue/test_file.py
|
'''
Unit tests for `tilequeue.queue.file`.
'''
from tilequeue.queue import OutputFileQueue
from tilequeue import tile
from ModestMaps.Core import Coordinate
import unittest
import StringIO
class TestQueue(unittest.TestCase):
def setUp(self):
self.test_tile_coords = [
(0, 0, 0),
(1, 2, 3),
(4, 5, 6),
(9, 3, 1),
(4, 7, 1)
]
self.test_tile_objs = [
Coordinate(*coords) for coords in self.test_tile_coords]
self.tile_coords_str = '\n'.join(
map(tile.serialize_coord, self.test_tile_objs)) + '\n'
self.tiles_fp = StringIO.StringIO()
self.queue = OutputFileQueue(self.tiles_fp)
def test_read(self):
self._write_tiles_to_file()
# Test `.read()` for multiple records.
num_to_read = 3
actual_coords = [
msg.coord for msg in self.queue.read(max_to_read=num_to_read)]
expected = self.test_tile_objs[:num_to_read]
self.assertEqual(
actual_coords, expected, 'Reading multiple records failed')
# Test `.read()` for just 1 record at a time.
for expected in self.test_tile_objs[num_to_read:]:
[actual_msg] = self.queue.read()
self.assertEqual(
actual_msg.coord, expected,
'Reading 1 record failed')
def test_enqueue_and_enqueue_batch(self):
# Test `.enqueue_batch()`.
num_to_enqueue = 3
self.assertEqual(
self.queue.enqueue_batch(self.test_tile_objs[:num_to_enqueue]),
(num_to_enqueue, 0),
'Return value of `enqueue_batch()` does not match expected'
)
# Test `.enqueue()`.
for coords in self.test_tile_objs[num_to_enqueue:]:
self.queue.enqueue(coords)
self.assertEqual(
self.tiles_fp.getvalue(),
self.tile_coords_str,
'Contents of file do not match expected')
def test_clear(self):
self._write_tiles_to_file()
self.assertEqual(
self.queue.clear(), -1,
'Return value of `clear()` does not match expected.')
self.assertEqual(
self.tiles_fp.getvalue(), '', '`clear()` did not clear the file!')
def test_close(self):
self.assertFalse(
self.tiles_fp.closed,
'Sanity check failed: the test runner\'s file pointer appears to '
'be closed. This shouldn\'t ever happen.')
self.queue.close()
self.assertTrue(self.tiles_fp.closed, 'File pointer was not closed!')
def _write_tiles_to_file(self):
self.tiles_fp.write(self.tile_coords_str)
self.tiles_fp.seek(0)
|
Add unit tests for queue.file
|
Add unit tests for queue.file
tests/queue/test_file.py
-Add a bunch of `unittest` unit tests for most/all of the
functions in `tilequeue.queue.file`, as per all of the recent
work there.
|
Python
|
mit
|
tilezen/tilequeue,mapzen/tilequeue
|
Add unit tests for queue.file
tests/queue/test_file.py
-Add a bunch of `unittest` unit tests for most/all of the
functions in `tilequeue.queue.file`, as per all of the recent
work there.
|
'''
Unit tests for `tilequeue.queue.file`.
'''
from tilequeue.queue import OutputFileQueue
from tilequeue import tile
from ModestMaps.Core import Coordinate
import unittest
import StringIO
class TestQueue(unittest.TestCase):
def setUp(self):
self.test_tile_coords = [
(0, 0, 0),
(1, 2, 3),
(4, 5, 6),
(9, 3, 1),
(4, 7, 1)
]
self.test_tile_objs = [
Coordinate(*coords) for coords in self.test_tile_coords]
self.tile_coords_str = '\n'.join(
map(tile.serialize_coord, self.test_tile_objs)) + '\n'
self.tiles_fp = StringIO.StringIO()
self.queue = OutputFileQueue(self.tiles_fp)
def test_read(self):
self._write_tiles_to_file()
# Test `.read()` for multiple records.
num_to_read = 3
actual_coords = [
msg.coord for msg in self.queue.read(max_to_read=num_to_read)]
expected = self.test_tile_objs[:num_to_read]
self.assertEqual(
actual_coords, expected, 'Reading multiple records failed')
# Test `.read()` for just 1 record at a time.
for expected in self.test_tile_objs[num_to_read:]:
[actual_msg] = self.queue.read()
self.assertEqual(
actual_msg.coord, expected,
'Reading 1 record failed')
def test_enqueue_and_enqueue_batch(self):
# Test `.enqueue_batch()`.
num_to_enqueue = 3
self.assertEqual(
self.queue.enqueue_batch(self.test_tile_objs[:num_to_enqueue]),
(num_to_enqueue, 0),
'Return value of `enqueue_batch()` does not match expected'
)
# Test `.enqueue()`.
for coords in self.test_tile_objs[num_to_enqueue:]:
self.queue.enqueue(coords)
self.assertEqual(
self.tiles_fp.getvalue(),
self.tile_coords_str,
'Contents of file do not match expected')
def test_clear(self):
self._write_tiles_to_file()
self.assertEqual(
self.queue.clear(), -1,
'Return value of `clear()` does not match expected.')
self.assertEqual(
self.tiles_fp.getvalue(), '', '`clear()` did not clear the file!')
def test_close(self):
self.assertFalse(
self.tiles_fp.closed,
'Sanity check failed: the test runner\'s file pointer appears to '
'be closed. This shouldn\'t ever happen.')
self.queue.close()
self.assertTrue(self.tiles_fp.closed, 'File pointer was not closed!')
def _write_tiles_to_file(self):
self.tiles_fp.write(self.tile_coords_str)
self.tiles_fp.seek(0)
|
<commit_before><commit_msg>Add unit tests for queue.file
tests/queue/test_file.py
-Add a bunch of `unittest` unit tests for most/all of the
functions in `tilequeue.queue.file`, as per all of the recent
work there.<commit_after>
|
'''
Unit tests for `tilequeue.queue.file`.
'''
from tilequeue.queue import OutputFileQueue
from tilequeue import tile
from ModestMaps.Core import Coordinate
import unittest
import StringIO
class TestQueue(unittest.TestCase):
def setUp(self):
self.test_tile_coords = [
(0, 0, 0),
(1, 2, 3),
(4, 5, 6),
(9, 3, 1),
(4, 7, 1)
]
self.test_tile_objs = [
Coordinate(*coords) for coords in self.test_tile_coords]
self.tile_coords_str = '\n'.join(
map(tile.serialize_coord, self.test_tile_objs)) + '\n'
self.tiles_fp = StringIO.StringIO()
self.queue = OutputFileQueue(self.tiles_fp)
def test_read(self):
self._write_tiles_to_file()
# Test `.read()` for multiple records.
num_to_read = 3
actual_coords = [
msg.coord for msg in self.queue.read(max_to_read=num_to_read)]
expected = self.test_tile_objs[:num_to_read]
self.assertEqual(
actual_coords, expected, 'Reading multiple records failed')
# Test `.read()` for just 1 record at a time.
for expected in self.test_tile_objs[num_to_read:]:
[actual_msg] = self.queue.read()
self.assertEqual(
actual_msg.coord, expected,
'Reading 1 record failed')
def test_enqueue_and_enqueue_batch(self):
# Test `.enqueue_batch()`.
num_to_enqueue = 3
self.assertEqual(
self.queue.enqueue_batch(self.test_tile_objs[:num_to_enqueue]),
(num_to_enqueue, 0),
'Return value of `enqueue_batch()` does not match expected'
)
# Test `.enqueue()`.
for coords in self.test_tile_objs[num_to_enqueue:]:
self.queue.enqueue(coords)
self.assertEqual(
self.tiles_fp.getvalue(),
self.tile_coords_str,
'Contents of file do not match expected')
def test_clear(self):
self._write_tiles_to_file()
self.assertEqual(
self.queue.clear(), -1,
'Return value of `clear()` does not match expected.')
self.assertEqual(
self.tiles_fp.getvalue(), '', '`clear()` did not clear the file!')
def test_close(self):
self.assertFalse(
self.tiles_fp.closed,
'Sanity check failed: the test runner\'s file pointer appears to '
'be closed. This shouldn\'t ever happen.')
self.queue.close()
self.assertTrue(self.tiles_fp.closed, 'File pointer was not closed!')
def _write_tiles_to_file(self):
self.tiles_fp.write(self.tile_coords_str)
self.tiles_fp.seek(0)
|
Add unit tests for queue.file
tests/queue/test_file.py
-Add a bunch of `unittest` unit tests for most/all of the
functions in `tilequeue.queue.file`, as per all of the recent
work there.'''
Unit tests for `tilequeue.queue.file`.
'''
from tilequeue.queue import OutputFileQueue
from tilequeue import tile
from ModestMaps.Core import Coordinate
import unittest
import StringIO
class TestQueue(unittest.TestCase):
def setUp(self):
self.test_tile_coords = [
(0, 0, 0),
(1, 2, 3),
(4, 5, 6),
(9, 3, 1),
(4, 7, 1)
]
self.test_tile_objs = [
Coordinate(*coords) for coords in self.test_tile_coords]
self.tile_coords_str = '\n'.join(
map(tile.serialize_coord, self.test_tile_objs)) + '\n'
self.tiles_fp = StringIO.StringIO()
self.queue = OutputFileQueue(self.tiles_fp)
def test_read(self):
self._write_tiles_to_file()
# Test `.read()` for multiple records.
num_to_read = 3
actual_coords = [
msg.coord for msg in self.queue.read(max_to_read=num_to_read)]
expected = self.test_tile_objs[:num_to_read]
self.assertEqual(
actual_coords, expected, 'Reading multiple records failed')
# Test `.read()` for just 1 record at a time.
for expected in self.test_tile_objs[num_to_read:]:
[actual_msg] = self.queue.read()
self.assertEqual(
actual_msg.coord, expected,
'Reading 1 record failed')
def test_enqueue_and_enqueue_batch(self):
# Test `.enqueue_batch()`.
num_to_enqueue = 3
self.assertEqual(
self.queue.enqueue_batch(self.test_tile_objs[:num_to_enqueue]),
(num_to_enqueue, 0),
'Return value of `enqueue_batch()` does not match expected'
)
# Test `.enqueue()`.
for coords in self.test_tile_objs[num_to_enqueue:]:
self.queue.enqueue(coords)
self.assertEqual(
self.tiles_fp.getvalue(),
self.tile_coords_str,
'Contents of file do not match expected')
def test_clear(self):
self._write_tiles_to_file()
self.assertEqual(
self.queue.clear(), -1,
'Return value of `clear()` does not match expected.')
self.assertEqual(
self.tiles_fp.getvalue(), '', '`clear()` did not clear the file!')
def test_close(self):
self.assertFalse(
self.tiles_fp.closed,
'Sanity check failed: the test runner\'s file pointer appears to '
'be closed. This shouldn\'t ever happen.')
self.queue.close()
self.assertTrue(self.tiles_fp.closed, 'File pointer was not closed!')
def _write_tiles_to_file(self):
self.tiles_fp.write(self.tile_coords_str)
self.tiles_fp.seek(0)
|
<commit_before><commit_msg>Add unit tests for queue.file
tests/queue/test_file.py
-Add a bunch of `unittest` unit tests for most/all of the
functions in `tilequeue.queue.file`, as per all of the recent
work there.<commit_after>'''
Unit tests for `tilequeue.queue.file`.
'''
from tilequeue.queue import OutputFileQueue
from tilequeue import tile
from ModestMaps.Core import Coordinate
import unittest
import StringIO
class TestQueue(unittest.TestCase):
def setUp(self):
self.test_tile_coords = [
(0, 0, 0),
(1, 2, 3),
(4, 5, 6),
(9, 3, 1),
(4, 7, 1)
]
self.test_tile_objs = [
Coordinate(*coords) for coords in self.test_tile_coords]
self.tile_coords_str = '\n'.join(
map(tile.serialize_coord, self.test_tile_objs)) + '\n'
self.tiles_fp = StringIO.StringIO()
self.queue = OutputFileQueue(self.tiles_fp)
def test_read(self):
self._write_tiles_to_file()
# Test `.read()` for multiple records.
num_to_read = 3
actual_coords = [
msg.coord for msg in self.queue.read(max_to_read=num_to_read)]
expected = self.test_tile_objs[:num_to_read]
self.assertEqual(
actual_coords, expected, 'Reading multiple records failed')
# Test `.read()` for just 1 record at a time.
for expected in self.test_tile_objs[num_to_read:]:
[actual_msg] = self.queue.read()
self.assertEqual(
actual_msg.coord, expected,
'Reading 1 record failed')
def test_enqueue_and_enqueue_batch(self):
# Test `.enqueue_batch()`.
num_to_enqueue = 3
self.assertEqual(
self.queue.enqueue_batch(self.test_tile_objs[:num_to_enqueue]),
(num_to_enqueue, 0),
'Return value of `enqueue_batch()` does not match expected'
)
# Test `.enqueue()`.
for coords in self.test_tile_objs[num_to_enqueue:]:
self.queue.enqueue(coords)
self.assertEqual(
self.tiles_fp.getvalue(),
self.tile_coords_str,
'Contents of file do not match expected')
def test_clear(self):
self._write_tiles_to_file()
self.assertEqual(
self.queue.clear(), -1,
'Return value of `clear()` does not match expected.')
self.assertEqual(
self.tiles_fp.getvalue(), '', '`clear()` did not clear the file!')
def test_close(self):
self.assertFalse(
self.tiles_fp.closed,
'Sanity check failed: the test runner\'s file pointer appears to '
'be closed. This shouldn\'t ever happen.')
self.queue.close()
self.assertTrue(self.tiles_fp.closed, 'File pointer was not closed!')
def _write_tiles_to_file(self):
self.tiles_fp.write(self.tile_coords_str)
self.tiles_fp.seek(0)
|
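Based on the API the tests exercise, a sketch of driving `OutputFileQueue` with a real file instead of `StringIO` (the path is hypothetical, and the rewind mirrors what the tests do before reading):

from tilequeue.queue import OutputFileQueue
from ModestMaps.Core import Coordinate

fp = open('tiles.txt', 'r+')  # hypothetical tile list
queue = OutputFileQueue(fp)
queue.enqueue_batch([Coordinate(1, 2, 3), Coordinate(4, 5, 6)])
fp.seek(0)  # rewind before reading, as the tests do
for msg in queue.read(max_to_read=2):
    print(msg.coord)
queue.close()  # closes the underlying file object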
|
f89ba4c5a9ada2ce84c9d46d6100fc5aec13f758
|
examples/stories/movie_lister/apps_db_csv.py
|
examples/stories/movie_lister/apps_db_csv.py
|
"""A naive example of dependency injection in Python.
Example implementation of dependency injection in Python from Martin Fowler's
article about dependency injection and inversion of control:
http://www.martinfowler.com/articles/injection.html
This mini application uses the ``movies`` library, which is configured to work
with a csv file movies database.
"""
import sqlite3
from dependency_injector import catalogs
from dependency_injector import providers
from dependency_injector import injections
from movies import MoviesModule
from movies import finders
from settings import MOVIES_CSV_PATH
from settings import MOVIES_DB_PATH
class ApplicationModule(catalogs.DeclarativeCatalog):
"""Catalog of application component providers."""
database = providers.Singleton(sqlite3.connect, MOVIES_DB_PATH)
@catalogs.copy(MoviesModule)
class DbMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.SqliteMovieFinder,
*MoviesModule.movie_finder.injections,
database=ApplicationModule.database)
@catalogs.copy(MoviesModule)
class CsvMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.CsvMovieFinder,
*MoviesModule.movie_finder.injections,
csv_file=MOVIES_CSV_PATH,
delimeter=',')
@injections.inject(db_movie_lister=DbMoviesModule.movie_lister)
@injections.inject(csv_movie_lister=CsvMoviesModule.movie_lister)
def main(db_movie_lister, csv_movie_lister):
"""Main function.
This program prints info about all movies that were directed by different
persons and then prints all movies that were released in 2015.
:param db_movie_lister: Database movie lister instance
:type db_movie_lister: movies.listers.MovieLister
:param csv_movie_lister: CSV movie lister instance
:type csv_movie_lister: movies.listers.MovieLister
"""
print db_movie_lister.movies_directed_by('Francis Lawrence')
print db_movie_lister.movies_directed_by('Patricia Riggen')
print db_movie_lister.movies_directed_by('JJ Abrams')
print db_movie_lister.movies_released_in(2015)
print csv_movie_lister.movies_directed_by('Francis Lawrence')
print csv_movie_lister.movies_directed_by('Patricia Riggen')
print csv_movie_lister.movies_directed_by('JJ Abrams')
print csv_movie_lister.movies_released_in(2015)
if __name__ == '__main__':
main()
|
Add example of catalog copying into MovieLister example
|
Add example of catalog copying into MovieLister example
|
Python
|
bsd-3-clause
|
rmk135/dependency_injector,rmk135/objects,ets-labs/python-dependency-injector,ets-labs/dependency_injector
|
Add example of catalog copying into MovieLister example
|
"""A naive example of dependency injection in Python.
Example implementation of dependency injection in Python from Martin Fowler's
article about dependency injection and inversion of control:
http://www.martinfowler.com/articles/injection.html
This mini application uses the ``movies`` library, which is configured to work
with a csv file movies database.
"""
import sqlite3
from dependency_injector import catalogs
from dependency_injector import providers
from dependency_injector import injections
from movies import MoviesModule
from movies import finders
from settings import MOVIES_CSV_PATH
from settings import MOVIES_DB_PATH
class ApplicationModule(catalogs.DeclarativeCatalog):
"""Catalog of application component providers."""
database = providers.Singleton(sqlite3.connect, MOVIES_DB_PATH)
@catalogs.copy(MoviesModule)
class DbMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.SqliteMovieFinder,
*MoviesModule.movie_finder.injections,
database=ApplicationModule.database)
@catalogs.copy(MoviesModule)
class CsvMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.CsvMovieFinder,
*MoviesModule.movie_finder.injections,
csv_file=MOVIES_CSV_PATH,
delimeter=',')
@injections.inject(db_movie_lister=DbMoviesModule.movie_lister)
@injections.inject(csv_movie_lister=CsvMoviesModule.movie_lister)
def main(db_movie_lister, csv_movie_lister):
"""Main function.
This program prints info about all movies that were directed by different
persons and then prints all movies that were released in 2015.
:param db_movie_lister: Database movie lister instance
:type db_movie_lister: movies.listers.MovieLister
:param csv_movie_lister: CSV movie lister instance
:type csv_movie_lister: movies.listers.MovieLister
"""
print db_movie_lister.movies_directed_by('Francis Lawrence')
print db_movie_lister.movies_directed_by('Patricia Riggen')
print db_movie_lister.movies_directed_by('JJ Abrams')
print db_movie_lister.movies_released_in(2015)
print csv_movie_lister.movies_directed_by('Francis Lawrence')
print csv_movie_lister.movies_directed_by('Patricia Riggen')
print csv_movie_lister.movies_directed_by('JJ Abrams')
print csv_movie_lister.movies_released_in(2015)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add example of catalog copying into MovieLister example<commit_after>
|
"""A naive example of dependency injection in Python.
Example implementation of dependency injection in Python from Martin Fowler's
article about dependency injection and inversion of control:
http://www.martinfowler.com/articles/injection.html
This mini application uses the ``movies`` library, which is configured to work
with a csv file movies database.
"""
import sqlite3
from dependency_injector import catalogs
from dependency_injector import providers
from dependency_injector import injections
from movies import MoviesModule
from movies import finders
from settings import MOVIES_CSV_PATH
from settings import MOVIES_DB_PATH
class ApplicationModule(catalogs.DeclarativeCatalog):
"""Catalog of application component providers."""
database = providers.Singleton(sqlite3.connect, MOVIES_DB_PATH)
@catalogs.copy(MoviesModule)
class DbMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.SqliteMovieFinder,
*MoviesModule.movie_finder.injections,
database=ApplicationModule.database)
@catalogs.copy(MoviesModule)
class CsvMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.CsvMovieFinder,
*MoviesModule.movie_finder.injections,
csv_file=MOVIES_CSV_PATH,
delimeter=',')
@injections.inject(db_movie_lister=DbMoviesModule.movie_lister)
@injections.inject(csv_movie_lister=CsvMoviesModule.movie_lister)
def main(db_movie_lister, csv_movie_lister):
"""Main function.
This program prints info about all movies that were directed by different
persons and then prints all movies that were released in 2015.
:param db_movie_lister: Database movie lister instance
:type db_movie_lister: movies.listers.MovieLister
:param csv_movie_lister: CSV movie lister instance
:type csv_movie_lister: movies.listers.MovieLister
"""
print db_movie_lister.movies_directed_by('Francis Lawrence')
print db_movie_lister.movies_directed_by('Patricia Riggen')
print db_movie_lister.movies_directed_by('JJ Abrams')
print db_movie_lister.movies_released_in(2015)
print csv_movie_lister.movies_directed_by('Francis Lawrence')
print csv_movie_lister.movies_directed_by('Patricia Riggen')
print csv_movie_lister.movies_directed_by('JJ Abrams')
print csv_movie_lister.movies_released_in(2015)
if __name__ == '__main__':
main()
|
Add example of catalog copying into MovieLister example"""A naive example of dependency injection in Python.
Example implementation of dependency injection in Python from Martin Fowler's
article about dependency injection and inversion of control:
http://www.martinfowler.com/articles/injection.html
This mini application uses the ``movies`` library, which is configured to work
with a csv file movies database.
"""
import sqlite3
from dependency_injector import catalogs
from dependency_injector import providers
from dependency_injector import injections
from movies import MoviesModule
from movies import finders
from settings import MOVIES_CSV_PATH
from settings import MOVIES_DB_PATH
class ApplicationModule(catalogs.DeclarativeCatalog):
"""Catalog of application component providers."""
database = providers.Singleton(sqlite3.connect, MOVIES_DB_PATH)
@catalogs.copy(MoviesModule)
class DbMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.SqliteMovieFinder,
*MoviesModule.movie_finder.injections,
database=ApplicationModule.database)
@catalogs.copy(MoviesModule)
class CsvMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.CsvMovieFinder,
*MoviesModule.movie_finder.injections,
csv_file=MOVIES_CSV_PATH,
delimeter=',')
@injections.inject(db_movie_lister=DbMoviesModule.movie_lister)
@injections.inject(csv_movie_lister=CsvMoviesModule.movie_lister)
def main(db_movie_lister, csv_movie_lister):
"""Main function.
This program prints info about all movies that were directed by different
persons and then prints all movies that were released in 2015.
:param db_movie_lister: Database movie lister instance
:type db_movie_lister: movies.listers.MovieLister
:param csv_movie_lister: CSV movie lister instance
:type csv_movie_lister: movies.listers.MovieLister
"""
print db_movie_lister.movies_directed_by('Francis Lawrence')
print db_movie_lister.movies_directed_by('Patricia Riggen')
print db_movie_lister.movies_directed_by('JJ Abrams')
print db_movie_lister.movies_released_in(2015)
print csv_movie_lister.movies_directed_by('Francis Lawrence')
print csv_movie_lister.movies_directed_by('Patricia Riggen')
print csv_movie_lister.movies_directed_by('JJ Abrams')
print csv_movie_lister.movies_released_in(2015)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add example of catalog copying into MovieLister example<commit_after>"""A naive example of dependency injection in Python.
Example implementation of dependency injection in Python from Martin Fowler's
article about dependency injection and inversion of control:
http://www.martinfowler.com/articles/injection.html
This mini application uses the ``movies`` library, which is configured to work
with a csv file movies database.
"""
import sqlite3
from dependency_injector import catalogs
from dependency_injector import providers
from dependency_injector import injections
from movies import MoviesModule
from movies import finders
from settings import MOVIES_CSV_PATH
from settings import MOVIES_DB_PATH
class ApplicationModule(catalogs.DeclarativeCatalog):
"""Catalog of application component providers."""
database = providers.Singleton(sqlite3.connect, MOVIES_DB_PATH)
@catalogs.copy(MoviesModule)
class DbMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.SqliteMovieFinder,
*MoviesModule.movie_finder.injections,
database=ApplicationModule.database)
@catalogs.copy(MoviesModule)
class CsvMoviesModule(MoviesModule):
"""Customized catalog of movies module component providers."""
movie_finder = providers.Factory(finders.CsvMovieFinder,
*MoviesModule.movie_finder.injections,
csv_file=MOVIES_CSV_PATH,
delimeter=',')
@injections.inject(db_movie_lister=DbMoviesModule.movie_lister)
@injections.inject(csv_movie_lister=CsvMoviesModule.movie_lister)
def main(db_movie_lister, csv_movie_lister):
"""Main function.
This program prints info about all movies that were directed by different
persons and then prints all movies that were released in 2015.
:param db_movie_lister: Database movie lister instance
:type db_movie_lister: movies.listers.MovieLister
:param csv_movie_lister: CSV movie lister instance
:type csv_movie_lister: movies.listers.MovieLister
"""
print db_movie_lister.movies_directed_by('Francis Lawrence')
print db_movie_lister.movies_directed_by('Patricia Riggen')
print db_movie_lister.movies_directed_by('JJ Abrams')
print db_movie_lister.movies_released_in(2015)
print csv_movie_lister.movies_directed_by('Francis Lawrence')
print csv_movie_lister.movies_directed_by('Patricia Riggen')
print csv_movie_lister.movies_directed_by('JJ Abrams')
print csv_movie_lister.movies_released_in(2015)
if __name__ == '__main__':
main()
|
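The `@catalogs.copy` pattern above in its smallest form: copy every provider from a base catalog, then override just one. A generic sketch using only the API shown in the example (the provider targets are placeholders):

from dependency_injector import catalogs, providers

class Base(catalogs.DeclarativeCatalog):
    service = providers.Factory(list)  # placeholder target

@catalogs.copy(Base)
class Customized(Base):
    # Only `service` is replaced; every other Base provider is copied as-is.
    service = providers.Factory(dict)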
|
7fc5855aa3aa7e554aeb6faf9ae05567c7a4910c
|
monitor_website_status.py
|
monitor_website_status.py
|
#!/usr/bin/python
import urllib2
from BeautifulSoup import BeautifulSoup
from StringIO import StringIO
from datetime import datetime, timedelta
import subprocess
WEBSITE = 'http://www.newsdiffs.org/browse/'
MAX_TIME = timedelta(hours=1)
EMAILS = 'ecprice@mit.edu jenny8lee@gmail.com price@mit.edu'.split()
def send_alert_email(subject, body):
email = 'Subject: %s\n\n%s' % (subject, body)
p = subprocess.Popen(['/usr/bin/msmtp', '-t'] + EMAILS,
stdin=subprocess.PIPE)
p.communicate(email)
if p.wait():
print 'Bad return code:', p.returncode
def get_update_time():
html = urllib2.urlopen(WEBSITE)
soup = BeautifulSoup(html)
datestr = soup.findAll('td')[1].findChild('a').getText()
date = datetime.strptime(datestr.replace('.', ''), '%B %d, %Y, %I:%M %p')
return date
if __name__ == '__main__':
try:
update_time = get_update_time()
time_since_update = datetime.now() - update_time
print 'Update time:', time_since_update
if time_since_update > MAX_TIME:
send_alert_email('Trouble with newsdiffs.org',
'No updates since %s\n%s is too long' %
(update_time, time_since_update))
except Exception, e:
import traceback
traceback.print_exc()
send_alert_email('Trouble with newsdiffs.org',
'Cannot check website\n%s' % traceback.format_exc())
|
Add monitor script for website.
|
Add monitor script for website.
|
Python
|
mit
|
bjowi/newsdiffs,flupzor/newsdiffs,amandabee/newsdiffs,flupzor/bijgeschaafd,bjowi/newsdiffs,flupzor/bijgeschaafd,catcosmo/newsdiffs,flupzor/newsdiffs,catcosmo/newsdiffs,catcosmo/newsdiffs,flupzor/bijgeschaafd,COLABORATI/newsdiffs,COLABORATI/newsdiffs,flupzor/newsdiffs,bjowi/newsdiffs,flupzor/bijgeschaafd,flupzor/newsdiffs,amandabee/newsdiffs,COLABORATI/newsdiffs,amandabee/newsdiffs
|
Add monitor script for website.
|
#!/usr/bin/python
import urllib2
from BeautifulSoup import BeautifulSoup
from StringIO import StringIO
from datetime import datetime, timedelta
import subprocess
WEBSITE = 'http://www.newsdiffs.org/browse/'
MAX_TIME = timedelta(hours=1)
EMAILS = 'ecprice@mit.edu jenny8lee@gmail.com price@mit.edu'.split()
def send_alert_email(subject, body):
email = 'Subject: %s\n\n%s' % (subject, body)
p = subprocess.Popen(['/usr/bin/msmtp', '-t'] + EMAILS,
stdin=subprocess.PIPE)
p.communicate(email)
if p.wait():
print 'Bad return code:', p.returncode
def get_update_time():
html = urllib2.urlopen(WEBSITE)
soup = BeautifulSoup(html)
datestr = soup.findAll('td')[1].findChild('a').getText()
date = datetime.strptime(datestr.replace('.', ''), '%B %d, %Y, %I:%M %p')
return date
if __name__ == '__main__':
try:
update_time = get_update_time()
time_since_update = datetime.now() - update_time
print 'Update time:', time_since_update
if time_since_update > MAX_TIME:
send_alert_email('Trouble with newsdiffs.org',
'No updates since %s\n%s is too long' %
(update_time, time_since_update))
except Exception, e:
import traceback
traceback.print_exc()
send_alert_email('Trouble with newsdiffs.org',
'Cannot check website\n%s' % traceback.format_exc())
|
<commit_before><commit_msg>Add monitor script for website.<commit_after>
|
#!/usr/bin/python
import urllib2
from BeautifulSoup import BeautifulSoup
from StringIO import StringIO
from datetime import datetime, timedelta
import subprocess
WEBSITE = 'http://www.newsdiffs.org/browse/'
MAX_TIME = timedelta(hours=1)
EMAILS = 'ecprice@mit.edu jenny8lee@gmail.com price@mit.edu'.split()
def send_alert_email(subject, body):
email = 'Subject: %s\n\n%s' % (subject, body)
p = subprocess.Popen(['/usr/bin/msmtp', '-t'] + EMAILS,
stdin=subprocess.PIPE)
p.communicate(email)
if p.wait():
print 'Bad return code:', p.returncode
def get_update_time():
html = urllib2.urlopen(WEBSITE)
soup = BeautifulSoup(html)
datestr = soup.findAll('td')[1].findChild('a').getText()
date = datetime.strptime(datestr.replace('.', ''), '%B %d, %Y, %I:%M %p')
return date
if __name__ == '__main__':
try:
update_time = get_update_time()
time_since_update = datetime.now() - update_time
print 'Update time:', time_since_update
if time_since_update > MAX_TIME:
send_alert_email('Trouble with newsdiffs.org',
'No updates since %s\n%s is too long' %
(update_time, time_since_update))
except Exception, e:
import traceback
traceback.print_exc()
send_alert_email('Trouble with newsdiffs.org',
'Cannot check website\n%s' % traceback.format_exc())
|
Add monitor script for website.#!/usr/bin/python
import urllib2
from BeautifulSoup import BeautifulSoup
from StringIO import StringIO
from datetime import datetime, timedelta
import subprocess
WEBSITE = 'http://www.newsdiffs.org/browse/'
MAX_TIME = timedelta(hours=1)
EMAILS = 'ecprice@mit.edu jenny8lee@gmail.com price@mit.edu'.split()
def send_alert_email(subject, body):
email = 'Subject: %s\n\n%s' % (subject, body)
p = subprocess.Popen(['/usr/bin/msmtp', '-t'] + EMAILS,
stdin=subprocess.PIPE)
p.communicate(email)
if p.wait():
print 'Bad return code:', p.returncode
def get_update_time():
html = urllib2.urlopen(WEBSITE)
soup = BeautifulSoup(html)
datestr = soup.findAll('td')[1].findChild('a').getText()
date = datetime.strptime(datestr.replace('.', ''), '%B %d, %Y, %I:%M %p')
return date
if __name__ == '__main__':
try:
update_time = get_update_time()
time_since_update = datetime.now() - update_time
print 'Update time:', time_since_update
if time_since_update > MAX_TIME:
send_alert_email('Trouble with newsdiffs.org',
'No updates since %s\n%s is too long' %
(update_time, time_since_update))
except Exception, e:
import traceback
traceback.print_exc()
send_alert_email('Trouble with newsdiffs.org',
'Cannot check website\n%s' % traceback.format_exc())
|
<commit_before><commit_msg>Add monitor script for website.<commit_after>#!/usr/bin/python
import urllib2
from BeautifulSoup import BeautifulSoup
from StringIO import StringIO
from datetime import datetime, timedelta
import subprocess
WEBSITE = 'http://www.newsdiffs.org/browse/'
MAX_TIME = timedelta(hours=1)
EMAILS = 'ecprice@mit.edu jenny8lee@gmail.com price@mit.edu'.split()
def send_alert_email(subject, body):
email = 'Subject: %s\n\n%s' % (subject, body)
p = subprocess.Popen(['/usr/bin/msmtp', '-t'] + EMAILS,
stdin=subprocess.PIPE)
p.communicate(email)
if p.wait():
print 'Bad return code:', p.returncode
def get_update_time():
html = urllib2.urlopen(WEBSITE)
soup = BeautifulSoup(html)
datestr = soup.findAll('td')[1].findChild('a').getText()
date = datetime.strptime(datestr.replace('.', ''), '%B %d, %Y, %I:%M %p')
return date
if __name__ == '__main__':
try:
update_time = get_update_time()
time_since_update = datetime.now() - update_time
print 'Update time:', time_since_update
if time_since_update > MAX_TIME:
send_alert_email('Trouble with newsdiffs.org',
'No updates since %s\n%s is too long' %
(update_time, time_since_update))
except Exception, e:
import traceback
traceback.print_exc()
send_alert_email('Trouble with newsdiffs.org',
'Cannot check website\n%s' % traceback.format_exc())
|
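The parsing step inside `get_update_time()`, isolated with an illustrative sample string: stripping the periods turns "p.m." into "pm", which `%p` matches case-insensitively.

from datetime import datetime

datestr = 'March 5, 2013, 4:30 p.m.'  # shape of the scraped timestamp
parsed = datetime.strptime(datestr.replace('.', ''), '%B %d, %Y, %I:%M %p')
# -> datetime.datetime(2013, 3, 5, 16, 30)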
|
1340ff25daef84e66841476eaff9446886cda0b5
|
minsem.py
|
minsem.py
|
#!/usr/bin/env python
from collections import OrderedDict
class DataToken:
def __init__(self, line: str):
(
self.offset,
self.word,
self.lowercase_lemma,
self.pos_tag,
self.mwe_tag,
self.parent_offset,
self.strength,
self.supersense,
self.sentence_id
) = line.split('\t')
class DataSentence:
def __init__(self):
self._tokens: OrderedDict = OrderedDict()
self.sentence_id = None
def __iter__(self):
return iter(self._tokens)
def __getitem__(self, item):
return self._tokens[item]
def __bool__(self):
return bool(self._tokens)
def append(self, token: DataToken):
if not self.sentence_id:
self.sentence_id = token.sentence_id
else:
if token.sentence_id != self.sentence_id:
raise ValueError("sentence IDs do not match: {} (sentence) != {} (token)".format(self.sentence_id,
token.sentence_id))
self._tokens[int(token.offset)] = token
@property
def sentence(self):
return ' '.join(map(lambda t: t.word, self._tokens.values()))
def read_data_file(datafile: str):
data_sentences = {}
with open(datafile) as df:
sentence = DataSentence()
for line in df:
line = line.strip(' \n')
if not line:
# An empty line indicates the end of a sentence.
if sentence:
data_sentences[sentence.sentence_id] = sentence
sentence = DataSentence()
else:
# A new token to be accumulated!
token = DataToken(line)
sentence.append(token)
# Check if there is a valid sentence; this would happen if the file does not end in a newline.
if sentence:
data_sentences[sentence.sentence_id] = sentence
return data_sentences
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
    # positional names use underscores so argparse exposes args.training_data
    parser.add_argument('training_data', help='the datafile to train with')
    parser.add_argument('testing_data', help='the datafile to test with')
args = parser.parse_args()
# Read the data.
training_sentences = read_data_file(args.training_data)
testing_sentences = read_data_file(args.testing_data)
|
Implement reading from data files
|
Implement reading from data files
Reads files and stores contents in new DataSentence/DataToken constructions.
|
Python
|
mit
|
pdarragh/MinSem
|
Implement reading from data files
Reads files and stores contents in new DataSentence/DataToken constructions.
|
#!/usr/bin/env python
from collections import OrderedDict
class DataToken:
def __init__(self, line: str):
(
self.offset,
self.word,
self.lowercase_lemma,
self.pos_tag,
self.mwe_tag,
self.parent_offset,
self.strength,
self.supersense,
self.sentence_id
) = line.split('\t')
class DataSentence:
def __init__(self):
self._tokens: OrderedDict = OrderedDict()
self.sentence_id = None
def __iter__(self):
return iter(self._tokens)
def __getitem__(self, item):
return self._tokens[item]
def __bool__(self):
return bool(self._tokens)
def append(self, token: DataToken):
if not self.sentence_id:
self.sentence_id = token.sentence_id
else:
if token.sentence_id != self.sentence_id:
raise ValueError("sentence IDs do not match: {} (sentence) != {} (token)".format(self.sentence_id,
token.sentence_id))
self._tokens[int(token.offset)] = token
@property
def sentence(self):
return ' '.join(map(lambda t: t.word, self._tokens.values()))
def read_data_file(datafile: str):
data_sentences = {}
with open(datafile) as df:
sentence = DataSentence()
for line in df:
line = line.strip(' \n')
if not line:
# An empty line indicates the end of a sentence.
if sentence:
data_sentences[sentence.sentence_id] = sentence
sentence = DataSentence()
else:
# A new token to be accumulated!
token = DataToken(line)
sentence.append(token)
# Check if there is a valid sentence; this would happen if the file does not end in a newline.
if sentence:
data_sentences[sentence.sentence_id] = sentence
return data_sentences
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
    # positional names use underscores so argparse exposes args.training_data
    parser.add_argument('training_data', help='the datafile to train with')
    parser.add_argument('testing_data', help='the datafile to test with')
args = parser.parse_args()
# Read the data.
training_sentences = read_data_file(args.training_data)
testing_sentences = read_data_file(args.testing_data)
|
<commit_before><commit_msg>Implement reading from data files
Reads files and stores contents in new DataSentence/DataToken constructions.<commit_after>
|
#!/usr/bin/env python
from collections import OrderedDict
class DataToken:
def __init__(self, line: str):
(
self.offset,
self.word,
self.lowercase_lemma,
self.pos_tag,
self.mwe_tag,
self.parent_offset,
self.strength,
self.supersense,
self.sentence_id
) = line.split('\t')
class DataSentence:
def __init__(self):
self._tokens: OrderedDict = OrderedDict()
self.sentence_id = None
def __iter__(self):
return iter(self._tokens)
def __getitem__(self, item):
return self._tokens[item]
def __bool__(self):
return bool(self._tokens)
def append(self, token: DataToken):
if not self.sentence_id:
self.sentence_id = token.sentence_id
else:
if token.sentence_id != self.sentence_id:
raise ValueError("sentence IDs do not match: {} (sentence) != {} (token)".format(self.sentence_id,
token.sentence_id))
self._tokens[int(token.offset)] = token
@property
def sentence(self):
return ' '.join(map(lambda t: t.word, self._tokens.values()))
def read_data_file(datafile: str):
data_sentences = {}
with open(datafile) as df:
sentence = DataSentence()
for line in df:
line = line.strip(' \n')
if not line:
# An empty line indicates the end of a sentence.
if sentence:
data_sentences[sentence.sentence_id] = sentence
sentence = DataSentence()
else:
# A new token to be accumulated!
token = DataToken(line)
sentence.append(token)
# Check if there is a valid sentence; this would happen if the file does not end in a newline.
if sentence:
data_sentences[sentence.sentence_id] = sentence
return data_sentences
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
    # positional names use underscores so argparse exposes args.training_data
    parser.add_argument('training_data', help='the datafile to train with')
    parser.add_argument('testing_data', help='the datafile to test with')
args = parser.parse_args()
# Read the data.
training_sentences = read_data_file(args.training_data)
testing_sentences = read_data_file(args.testing_data)
|
Implement reading from data files
Reads files and stores contents in new DataSentence/DataToken constructions.#!/usr/bin/env python
from collections import OrderedDict
class DataToken:
def __init__(self, line: str):
(
self.offset,
self.word,
self.lowercase_lemma,
self.pos_tag,
self.mwe_tag,
self.parent_offset,
self.strength,
self.supersense,
self.sentence_id
) = line.split('\t')
class DataSentence:
def __init__(self):
self._tokens: OrderedDict = OrderedDict()
self.sentence_id = None
def __iter__(self):
return iter(self._tokens)
def __getitem__(self, item):
return self._tokens[item]
def __bool__(self):
return bool(self._tokens)
def append(self, token: DataToken):
if not self.sentence_id:
self.sentence_id = token.sentence_id
else:
if token.sentence_id != self.sentence_id:
raise ValueError("sentence IDs do not match: {} (sentence) != {} (token)".format(self.sentence_id,
token.sentence_id))
self._tokens[int(token.offset)] = token
@property
def sentence(self):
return ' '.join(map(lambda t: t.word, self._tokens.values()))
def read_data_file(datafile: str):
data_sentences = {}
with open(datafile) as df:
sentence = DataSentence()
for line in df:
line = line.strip(' \n')
if not line:
# An empty line indicates the end of a sentence.
if sentence:
data_sentences[sentence.sentence_id] = sentence
sentence = DataSentence()
else:
# A new token to be accumulated!
token = DataToken(line)
sentence.append(token)
# Check if there is a valid sentence; this would happen if the file does not end in a newline.
if sentence:
data_sentences[sentence.sentence_id] = sentence
return data_sentences
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
    # positional names use underscores so argparse exposes args.training_data
    parser.add_argument('training_data', help='the datafile to train with')
    parser.add_argument('testing_data', help='the datafile to test with')
args = parser.parse_args()
# Read the data.
training_sentences = read_data_file(args.training_data)
testing_sentences = read_data_file(args.testing_data)
|
<commit_before><commit_msg>Implement reading from data files
Reads files and stores contents in new DataSentence/DataToken constructions.<commit_after>#!/usr/bin/env python
from collections import OrderedDict
class DataToken:
def __init__(self, line: str):
(
self.offset,
self.word,
self.lowercase_lemma,
self.pos_tag,
self.mwe_tag,
self.parent_offset,
self.strength,
self.supersense,
self.sentence_id
) = line.split('\t')
class DataSentence:
def __init__(self):
self._tokens: OrderedDict = OrderedDict()
self.sentence_id = None
def __iter__(self):
return iter(self._tokens)
def __getitem__(self, item):
return self._tokens[item]
def __bool__(self):
return bool(self._tokens)
def append(self, token: DataToken):
if not self.sentence_id:
self.sentence_id = token.sentence_id
else:
if token.sentence_id != self.sentence_id:
raise ValueError("sentence IDs do not match: {} (sentence) != {} (token)".format(self.sentence_id,
token.sentence_id))
self._tokens[int(token.offset)] = token
@property
def sentence(self):
return ' '.join(map(lambda t: t.word, self._tokens.values()))
def read_data_file(datafile: str):
data_sentences = {}
with open(datafile) as df:
sentence = DataSentence()
for line in df:
line = line.strip(' \n')
if not line:
# An empty line indicates the end of a sentence.
if sentence:
data_sentences[sentence.sentence_id] = sentence
sentence = DataSentence()
else:
# A new token to be accumulated!
token = DataToken(line)
sentence.append(token)
# Check if there is a valid sentence; this would happen if the file does not end in a newline.
if sentence:
data_sentences[sentence.sentence_id] = sentence
return data_sentences
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
    # positional names use underscores so argparse exposes args.training_data
    parser.add_argument('training_data', help='the datafile to train with')
    parser.add_argument('testing_data', help='the datafile to test with')
args = parser.parse_args()
# Read the data.
training_sentences = read_data_file(args.training_data)
testing_sentences = read_data_file(args.testing_data)
|
|
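A minimal sketch of driving the reader directly, assuming a hypothetical train.tsv whose non-empty rows carry the nine tab-separated fields DataToken unpacks:
from minsem import read_data_file
sentences = read_data_file('train.tsv')  # hypothetical path
for sentence_id, sentence in sentences.items():
    print(sentence_id, sentence.sentence)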
d95181d8de55c77e10aca390a40074d5ef0a7bb2
|
angr/procedures/java_lang/string_equals.py
|
angr/procedures/java_lang/string_equals.py
|
from ..java import JavaSimProcedure
import logging
l = logging.getLogger('angr.procedures.java.string.equals')
class StringEquals(JavaSimProcedure):
NO_RET = True
__provides__ = (
("java.lang.String", "equals(java.lang.String)"),
)
def run(self, str_1, str_2):
l.info("Called SimProcedure java.string.equals with args: %s (%r), %s (%r)", str_1, str_1, str_2, str_2)
|
Add dummy Simprocedure for java.lang.string.equals
|
Add dummy Simprocedure for java.lang.string.equals
|
Python
|
bsd-2-clause
|
schieb/angr,schieb/angr,iamahuman/angr,angr/angr,angr/angr,schieb/angr,angr/angr,iamahuman/angr,iamahuman/angr
|
Add dummy Simprocedure for java.lang.string.equals
|
from ..java import JavaSimProcedure
import logging
l = logging.getLogger('angr.procedures.java.string.equals')
class StringEquals(JavaSimProcedure):
NO_RET = True
__provides__ = (
("java.lang.String", "equals(java.lang.String)"),
)
def run(self, str_1, str_2):
l.info("Called SimProcedure java.string.equals with args: %s (%r), %s (%r)", str_1, str_1, str_2, str_2)
|
<commit_before><commit_msg>Add dummy Simprocedure for java.lang.string.equals<commit_after>
|
from ..java import JavaSimProcedure
import logging
l = logging.getLogger('angr.procedures.java.string.equals')
class StringEquals(JavaSimProcedure):
NO_RET = True
__provides__ = (
("java.lang.String", "equals(java.lang.String)"),
)
def run(self, str_1, str_2):
l.info("Called SimProcedure java.string.equals with args: %s (%r), %s (%r)", str_1, str_1, str_2, str_2)
|
Add dummy Simprocedure for java.lang.string.equalsfrom ..java import JavaSimProcedure
import logging
l = logging.getLogger('angr.procedures.java.string.equals')
class StringEquals(JavaSimProcedure):
NO_RET = True
__provides__ = (
("java.lang.String", "equals(java.lang.String)"),
)
def run(self, str_1, str_2):
l.info("Called SimProcedure java.string.equals with args: %s (%r), %s (%r)", str_1, str_1, str_2, str_2)
|
<commit_before><commit_msg>Add dummy Simprocedure for java.lang.string.equals<commit_after>from ..java import JavaSimProcedure
import logging
l = logging.getLogger('angr.procedures.java.string.equals')
class StringEquals(JavaSimProcedure):
NO_RET = True
__provides__ = (
("java.lang.String", "equals(java.lang.String)"),
)
def run(self, str_1, str_2):
l.info("Called SimProcedure java.string.equals with args: %s (%r), %s (%r)", str_1, str_1, str_2, str_2)
|
|
390430b676d88dbe26d28a5551257b3013aee19b
|
copy_selected_photos.py
|
copy_selected_photos.py
|
import pandas as pd
def main():
# create a directory for selected photos
print 'mkdir photos_las_vegas_food_drinks'
# load data about selected photos
selected_photos_business = pd.read_csv('las_vegas_food_drinks.csv')
# for each photo generate copy command
for photo_id in selected_photos_business['photo_id']:
print 'cp -v photos/' + photo_id + '.jpg photos_las_vegas_food_drinks'
if __name__ == "__main__":
main()
|
Add copying of selected photos to a new directory
|
Add copying of selected photos to a new directory
|
Python
|
mit
|
aysent/yelp-photo-explorer
|
Add copying of selected photos to a new directory
|
import pandas as pd
def main():
# create a directory for selected photos
print 'mkdir photos_las_vegas_food_drinks'
# load data about selected photos
selected_photos_business = pd.read_csv('las_vegas_food_drinks.csv')
# for each photo generate copy command
for photo_id in selected_photos_business['photo_id']:
print 'cp -v photos/' + photo_id + '.jpg photos_las_vegas_food_drinks'
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add copying of selected photos to a new directory<commit_after>
|
import pandas as pd
def main():
# create a directory for selected photos
print 'mkdir photos_las_vegas_food_drinks'
# load data about selected photos
selected_photos_business = pd.read_csv('las_vegas_food_drinks.csv')
# for each photo generate copy command
for photo_id in selected_photos_business['photo_id']:
print 'cp -v photos/' + photo_id + '.jpg photos_las_vegas_food_drinks'
if __name__ == "__main__":
main()
|
Add copying of selected photos to a new directoryimport pandas as pd
def main():
# create a directory for selected photos
print 'mkdir photos_las_vegas_food_drinks'
# load data about selected photos
selected_photos_business = pd.read_csv('las_vegas_food_drinks.csv')
# for each photo generate copy command
for photo_id in selected_photos_business['photo_id']:
print 'cp -v photos/' + photo_id + '.jpg photos_las_vegas_food_drinks'
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add copying of selected photos to a new directory<commit_after>import pandas as pd
def main():
# create a directory for selected photos
print 'mkdir photos_las_vegas_food_drinks'
# load data about selected photos
selected_photos_business = pd.read_csv('las_vegas_food_drinks.csv')
# for each photo generate copy command
for photo_id in selected_photos_business['photo_id']:
print 'cp -v photos/' + photo_id + '.jpg photos_las_vegas_food_drinks'
if __name__ == "__main__":
main()
|
|
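The script prints shell commands rather than copying files itself, which keeps the plan easy to inspect before running; a sketch of doing the copies in-process instead, with the same assumed paths:
import os
import shutil
import pandas as pd
photos = pd.read_csv('las_vegas_food_drinks.csv')
os.makedirs('photos_las_vegas_food_drinks')  # raises if the directory already exists
for photo_id in photos['photo_id']:
    shutil.copy(os.path.join('photos', photo_id + '.jpg'),
                'photos_las_vegas_food_drinks')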
868c29b23d74ef2dced64eed545995d43f0256ef
|
registries/serializers.py
|
registries/serializers.py
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
|
Add simple driller list serializer
|
Add simple driller list serializer
|
Python
|
apache-2.0
|
rstens/gwells,rstens/gwells,bcgov/gwells,rstens/gwells,rstens/gwells,bcgov/gwells,bcgov/gwells,bcgov/gwells
|
Add simple driller list serializer
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
|
<commit_before><commit_msg>Add simple driller list serializer<commit_after>
|
from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
|
Add simple driller list serializerfrom rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
|
<commit_before><commit_msg>Add simple driller list serializer<commit_after>from rest_framework import serializers
from registries.models import Organization
from gwells.models import ProvinceState
class DrillerListSerializer(serializers.ModelSerializer):
province_state = serializers.ReadOnlyField()
class Meta:
model = Organization
# Using all fields for now
fields = (
#'who_created',
#'when_created',
#'who_updated',
#'when_updated',
'name',
'street_address',
'city',
'province_state',
'postal_code',
'main_tel',
'fax_tel',
'website_url',
'certificate_authority',
)
|
|
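A sketch of exercising the serializer from a Django shell; the queryset and instance here are assumptions:
org = Organization.objects.first()
single = DrillerListSerializer(org).data
many = DrillerListSerializer(Organization.objects.all(), many=True).data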
dd0fd45f1324696144267da78a6fe62a8fc346cf
|
lpthw/ex24.py
|
lpthw/ex24.py
|
print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\t the lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
Add work for Exercise 24
|
Add work for Exercise 24
|
Python
|
mit
|
jaredmanning/learning,jaredmanning/learning
|
Add work for Exercise 24
|
print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\t the lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
<commit_before><commit_msg>Add work for Exercise 24<commit_after>
|
print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\t the lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
Add work for Exercise 24print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\t the lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
<commit_before><commit_msg>Add work for Exercise 24<commit_after>print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\t the lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
|
610396a7caa1c23532369d4bc5e382dbc21cba60
|
tests/chainer_tests/training_tests/extensions_tests/test_plot_report.py
|
tests/chainer_tests/training_tests/extensions_tests/test_plot_report.py
|
import unittest
from chainer.training import extensions
class TestPlotReport(unittest.TestCase):
def test_available(self):
try:
from matplotlib import pyplot # NOQA
available = True
except ImportError:
available = False
self.assertEqual(extensions.PlotReport.available(), available)
|
Fix PlotReport.available and write test
|
Fix PlotReport.available and write test
|
Python
|
mit
|
ktnyt/chainer,hvy/chainer,jnishi/chainer,anaruse/chainer,chainer/chainer,okuta/chainer,cupy/cupy,niboshi/chainer,cupy/cupy,ysekky/chainer,niboshi/chainer,keisuke-umezawa/chainer,tkerola/chainer,okuta/chainer,chainer/chainer,ktnyt/chainer,rezoo/chainer,wkentaro/chainer,jnishi/chainer,aonotas/chainer,keisuke-umezawa/chainer,okuta/chainer,keisuke-umezawa/chainer,kashif/chainer,wkentaro/chainer,pfnet/chainer,chainer/chainer,jnishi/chainer,ktnyt/chainer,chainer/chainer,jnishi/chainer,ronekko/chainer,cupy/cupy,wkentaro/chainer,hvy/chainer,niboshi/chainer,ktnyt/chainer,wkentaro/chainer,delta2323/chainer,okuta/chainer,hvy/chainer,keisuke-umezawa/chainer,niboshi/chainer,hvy/chainer,cupy/cupy,kiyukuta/chainer
|
Fix PlotReport.available and write test
|
import unittest
from chainer.training import extensions
class TestPlotReport(unittest.TestCase):
def test_available(self):
try:
from matplotlib import pyplot # NOQA
available = True
except ImportError:
available = False
self.assertEqual(extensions.PlotReport.available(), available)
|
<commit_before><commit_msg>Fix PlotReport.available and write test<commit_after>
|
import unittest
from chainer.training import extensions
class TestPlotReport(unittest.TestCase):
def test_available(self):
try:
from matplotlib import pyplot # NOQA
available = True
except ImportError:
available = False
self.assertEqual(extensions.PlotReport.available(), available)
|
Fix PlotReport.available and write testimport unittest
from chainer.training import extensions
class TestPlotReport(unittest.TestCase):
def test_available(self):
try:
from matplotlib import pyplot # NOQA
available = True
except ImportError:
available = False
self.assertEqual(extensions.PlotReport.available(), available)
|
<commit_before><commit_msg>Fix PlotReport.available and write test<commit_after>import unittest
from chainer.training import extensions
class TestPlotReport(unittest.TestCase):
def test_available(self):
try:
from matplotlib import pyplot # NOQA
available = True
except ImportError:
available = False
self.assertEqual(extensions.PlotReport.available(), available)
|
|
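Because available() degrades to False when matplotlib is missing, callers can guard the extension the same way; a sketch assuming an existing trainer, with an illustrative loss key and file name:
from chainer.training import extensions
if extensions.PlotReport.available():
    trainer.extend(extensions.PlotReport(['main/loss'], file_name='loss.png'))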
4dd4557796acca3046806a9504d8d6bcb78ca16d
|
test/test_UrbanDict.py
|
test/test_UrbanDict.py
|
#!/usr/bin/env python
###
# Copyright (c) 2004, Kevin Murphy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from testsupport import *
class UrbanDictTestCase(ChannelPluginTestCase, PluginDocumentation):
plugins = ('UrbanDict',)
def setUp(self):
ChannelPluginTestCase.setUp(self)
if network:
def testUrbanDict(self):
self.assertNotError('ud')
self.assertRegexp('ud somethinginvalid',
'Error: No definition found.')
self.assertRegexp('ud yo',
'a word that died a horrible death in 1993')
self.assertRegexp('ud bozac',
'"Druid Diallect"')
# vim:set shiftwidth=4 tabstop=8 expandtab textwidth=78:
|
Test cases for UrbanDict plugin
|
Test cases for UrbanDict plugin
|
Python
|
bsd-3-clause
|
kblin/supybot-gsoc,ProgVal/Limnoria-test,raboof/supybot,Ban3/Limnoria,Ban3/Limnoria,jeffmahoney/supybot,mazaclub/mazabot-core,buildbot/supybot,mazaclub/mazabot-core,frumiousbandersnatch/supybot-code,prashantpawar/supybot-rothbot,ProgVal/Limnoria-test,haxwithaxe/supybot
|
Test cases for UrbanDict plugin
|
#!/usr/bin/env python
###
# Copyright (c) 2004, Kevin Murphy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from testsupport import *
class UrbanDictTestCase(ChannelPluginTestCase, PluginDocumentation):
plugins = ('UrbanDict',)
def setUp(self):
ChannelPluginTestCase.setUp(self)
if network:
def testUrbanDict(self):
self.assertNotError('ud')
self.assertRegexp('ud somethinginvalid',
'Error: No definition found.')
self.assertRegexp('ud yo',
'a word that died a horrible death in 1993')
self.assertRegexp('ud bozac',
'"Druid Diallect"')
# vim:set shiftwidth=4 tabstop=8 expandtab textwidth=78:
|
<commit_before><commit_msg>Test cases for UrbanDict plugin<commit_after>
|
#!/usr/bin/env python
###
# Copyright (c) 2004, Kevin Murphy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from testsupport import *
class UrbanDictTestCase(ChannelPluginTestCase, PluginDocumentation):
plugins = ('UrbanDict',)
def setUp(self):
ChannelPluginTestCase.setUp(self)
if network:
def testUrbanDict(self):
self.assertNotError('ud')
self.assertRegexp('ud somethinginvalid',
'Error: No definition found.')
self.assertRegexp('ud yo',
'a word that died a horrible death in 1993')
self.assertRegexp('ud bozac',
'"Druid Diallect"')
# vim:set shiftwidth=4 tabstop=8 expandtab textwidth=78:
|
Test cases for UrbanDict plugin#!/usr/bin/env python
###
# Copyright (c) 2004, Kevin Murphy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from testsupport import *
class UrbanDictTestCase(ChannelPluginTestCase, PluginDocumentation):
plugins = ('UrbanDict',)
def setUp(self):
ChannelPluginTestCase.setUp(self)
if network:
def testUrbanDict(self):
self.assertNotError('ud')
self.assertRegexp('ud somethinginvalid',
'Error: No definition found.')
self.assertRegexp('ud yo',
'a word that died a horrible death in 1993')
self.assertRegexp('ud bozac',
'"Druid Diallect"')
# vim:set shiftwidth=4 tabstop=8 expandtab textwidth=78:
|
<commit_before><commit_msg>Test cases for UrbanDict plugin<commit_after>#!/usr/bin/env python
###
# Copyright (c) 2004, Kevin Murphy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from testsupport import *
class UrbanDictTestCase(ChannelPluginTestCase, PluginDocumentation):
plugins = ('UrbanDict',)
def setUp(self):
ChannelPluginTestCase.setUp(self)
if network:
def testUrbanDict(self):
self.assertNotError('ud')
self.assertRegexp('ud somethinginvalid',
'Error: No definition found.')
self.assertRegexp('ud yo',
'a word that died a horrible death in 1993')
self.assertRegexp('ud bozac',
'"Druid Diallect"')
# vim:set shiftwidth=4 tabstop=8 expandtab textwidth=78:
|
|
263194056dbd749681f36823b77bb0ba2c76e379
|
tests/test_es.py
|
tests/test_es.py
|
# -*- coding: utf-8 -*-
"""Elasticsearch client test cases."""
import unittest
from sqlalchemy import types as sql_types
from esis.es import Mapping
class MappingTest(unittest.TestCase):
"""Test translation from SQL schema to Elasticsearch mapping."""
def test_mapping_types(self):
"""Test mapping from sql to Elasticsearch index types."""
table_name = 'my_table'
table_schema = {
'my_bigint': sql_types.BIGINT(),
'my_boolean': sql_types.BOOLEAN(),
'my_char': sql_types.CHAR(16),
'my_clob': sql_types.CLOB(),
'my_date': sql_types.DATE(),
'my_datetime': sql_types.DATETIME(),
'my_decimal': sql_types.DECIMAL(10, 5),
'my_float': sql_types.FLOAT(),
'my_integer': sql_types.INTEGER(),
'my_nchar': sql_types.NCHAR(16),
'my_nvarchar': sql_types.NVARCHAR(16),
'my_null': sql_types.NullType(),
'my_numeric': sql_types.NUMERIC(),
'my_real': sql_types.REAL(),
'my_smallint': sql_types.SMALLINT(),
'my_text': sql_types.TEXT(),
'my_timestamp': sql_types.TIMESTAMP(),
'my_varchar': sql_types.VARCHAR(16),
}
mapping = Mapping(table_name, table_schema)
self.assertDictEqual(
mapping.mapping,
{
table_name: {
'properties': {
'my_bigint': {'type': 'long'},
'my_boolean': {'type': 'boolean'},
'my_char': {'type': 'string'},
'my_clob': {'type': 'string'},
'my_datetime': {'type': 'date'},
'my_float': {'type': 'float'},
'my_integer': {'type': 'long'},
'my_nchar': {'type': 'string'},
'my_nvarchar': {'type': 'string'},
'my_real': {'type': 'double'},
'my_smallint': {'type': 'integer'},
'my_text': {'type': 'string'},
'my_timestamp': {'type': 'date'},
'my_varchar': {'type': 'string'},
},
},
},
)
|
Add Elasticsearch mapping test case
|
Add Elasticsearch mapping test case
|
Python
|
mit
|
jcollado/esis
|
Add Elasticsearch mapping test case
|
# -*- coding: utf-8 -*-
"""Elasticsearch client test cases."""
import unittest
from sqlalchemy import types as sql_types
from esis.es import Mapping
class MappingTest(unittest.TestCase):
"""Test translation from SQL schema to Elasticsearch mapping."""
def test_mapping_types(self):
"""Test mapping from sql to Elasticsearch index types."""
table_name = 'my_table'
table_schema = {
'my_bigint': sql_types.BIGINT(),
'my_boolean': sql_types.BOOLEAN(),
'my_char': sql_types.CHAR(16),
'my_clob': sql_types.CLOB(),
'my_date': sql_types.DATE(),
'my_datetime': sql_types.DATETIME(),
'my_decimal': sql_types.DECIMAL(10, 5),
'my_float': sql_types.FLOAT(),
'my_integer': sql_types.INTEGER(),
'my_nchar': sql_types.NCHAR(16),
'my_nvarchar': sql_types.NVARCHAR(16),
'my_null': sql_types.NullType(),
'my_numeric': sql_types.NUMERIC(),
'my_real': sql_types.REAL(),
'my_smallint': sql_types.SMALLINT(),
'my_text': sql_types.TEXT(),
'my_timestamp': sql_types.TIMESTAMP(),
'my_varchar': sql_types.VARCHAR(16),
}
mapping = Mapping(table_name, table_schema)
self.assertDictEqual(
mapping.mapping,
{
table_name: {
'properties': {
'my_bigint': {'type': 'long'},
'my_boolean': {'type': 'boolean'},
'my_char': {'type': 'string'},
'my_clob': {'type': 'string'},
'my_datetime': {'type': 'date'},
'my_float': {'type': 'float'},
'my_integer': {'type': 'long'},
'my_nchar': {'type': 'string'},
'my_nvarchar': {'type': 'string'},
'my_real': {'type': 'double'},
'my_smallint': {'type': 'integer'},
'my_text': {'type': 'string'},
'my_timestamp': {'type': 'date'},
'my_varchar': {'type': 'string'},
},
},
},
)
|
<commit_before><commit_msg>Add Elasticsearch mapping test case<commit_after>
|
# -*- coding: utf-8 -*-
"""Elasticsearch client test cases."""
import unittest
from sqlalchemy import types as sql_types
from esis.es import Mapping
class MappingTest(unittest.TestCase):
"""Test translation from SQL schema to Elasticsearch mapping."""
def test_mapping_types(self):
"""Test mapping from sql to Elasticsearch index types."""
table_name = 'my_table'
table_schema = {
'my_bigint': sql_types.BIGINT(),
'my_boolean': sql_types.BOOLEAN(),
'my_char': sql_types.CHAR(16),
'my_clob': sql_types.CLOB(),
'my_date': sql_types.DATE(),
'my_datetime': sql_types.DATETIME(),
'my_decimal': sql_types.DECIMAL(10, 5),
'my_float': sql_types.FLOAT(),
'my_integer': sql_types.INTEGER(),
'my_nchar': sql_types.NCHAR(16),
'my_nvarchar': sql_types.NVARCHAR(16),
'my_null': sql_types.NullType(),
'my_numeric': sql_types.NUMERIC(),
'my_real': sql_types.REAL(),
'my_smallint': sql_types.SMALLINT(),
'my_text': sql_types.TEXT(),
'my_timestamp': sql_types.TIMESTAMP(),
'my_varchar': sql_types.VARCHAR(16),
}
mapping = Mapping(table_name, table_schema)
self.assertDictEqual(
mapping.mapping,
{
table_name: {
'properties': {
'my_bigint': {'type': 'long'},
'my_boolean': {'type': 'boolean'},
'my_char': {'type': 'string'},
'my_clob': {'type': 'string'},
'my_datetime': {'type': 'date'},
'my_float': {'type': 'float'},
'my_integer': {'type': 'long'},
'my_nchar': {'type': 'string'},
'my_nvarchar': {'type': 'string'},
'my_real': {'type': 'double'},
'my_smallint': {'type': 'integer'},
'my_text': {'type': 'string'},
'my_timestamp': {'type': 'date'},
'my_varchar': {'type': 'string'},
},
},
},
)
|
Add Elasticsearch mapping test case# -*- coding: utf-8 -*-
"""Elasticsearch client test cases."""
import unittest
from sqlalchemy import types as sql_types
from esis.es import Mapping
class MappingTest(unittest.TestCase):
"""Test translation from SQL schema to Elasticsearch mapping."""
def test_mapping_types(self):
"""Test mapping from sql to Elasticsearch index types."""
table_name = 'my_table'
table_schema = {
'my_bigint': sql_types.BIGINT(),
'my_boolean': sql_types.BOOLEAN(),
'my_char': sql_types.CHAR(16),
'my_clob': sql_types.CLOB(),
'my_date': sql_types.DATE(),
'my_datetime': sql_types.DATETIME(),
'my_decimal': sql_types.DECIMAL(10, 5),
'my_float': sql_types.FLOAT(),
'my_integer': sql_types.INTEGER(),
'my_nchar': sql_types.NCHAR(16),
'my_nvarchar': sql_types.NVARCHAR(16),
'my_null': sql_types.NullType(),
'my_numeric': sql_types.NUMERIC(),
'my_real': sql_types.REAL(),
'my_smallint': sql_types.SMALLINT(),
'my_text': sql_types.TEXT(),
'my_timestamp': sql_types.TIMESTAMP(),
'my_varchar': sql_types.VARCHAR(16),
}
mapping = Mapping(table_name, table_schema)
self.assertDictEqual(
mapping.mapping,
{
table_name: {
'properties': {
'my_bigint': {'type': 'long'},
'my_boolean': {'type': 'boolean'},
'my_char': {'type': 'string'},
'my_clob': {'type': 'string'},
'my_datetime': {'type': 'date'},
'my_float': {'type': 'float'},
'my_integer': {'type': 'long'},
'my_nchar': {'type': 'string'},
'my_nvarchar': {'type': 'string'},
'my_real': {'type': 'double'},
'my_smallint': {'type': 'integer'},
'my_text': {'type': 'string'},
'my_timestamp': {'type': 'date'},
'my_varchar': {'type': 'string'},
},
},
},
)
|
<commit_before><commit_msg>Add Elasticsearch mapping test case<commit_after># -*- coding: utf-8 -*-
"""Elasticsearch client test cases."""
import unittest
from sqlalchemy import types as sql_types
from esis.es import Mapping
class MappingTest(unittest.TestCase):
"""Test translation from SQL schema to Elasticsearch mapping."""
def test_mapping_types(self):
"""Test mapping from sql to Elasticsearch index types."""
table_name = 'my_table'
table_schema = {
'my_bigint': sql_types.BIGINT(),
'my_boolean': sql_types.BOOLEAN(),
'my_char': sql_types.CHAR(16),
'my_clob': sql_types.CLOB(),
'my_date': sql_types.DATE(),
'my_datetime': sql_types.DATETIME(),
'my_decimal': sql_types.DECIMAL(10, 5),
'my_float': sql_types.FLOAT(),
'my_integer': sql_types.INTEGER(),
'my_nchar': sql_types.NCHAR(16),
'my_nvarchar': sql_types.NVARCHAR(16),
'my_null': sql_types.NullType(),
'my_numeric': sql_types.NUMERIC(),
'my_real': sql_types.REAL(),
'my_smallint': sql_types.SMALLINT(),
'my_text': sql_types.TEXT(),
'my_timestamp': sql_types.TIMESTAMP(),
'my_varchar': sql_types.VARCHAR(16),
}
mapping = Mapping(table_name, table_schema)
self.assertDictEqual(
mapping.mapping,
{
table_name: {
'properties': {
'my_bigint': {'type': 'long'},
'my_boolean': {'type': 'boolean'},
'my_char': {'type': 'string'},
'my_clob': {'type': 'string'},
'my_datetime': {'type': 'date'},
'my_float': {'type': 'float'},
'my_integer': {'type': 'long'},
'my_nchar': {'type': 'string'},
'my_nvarchar': {'type': 'string'},
'my_real': {'type': 'double'},
'my_smallint': {'type': 'integer'},
'my_text': {'type': 'string'},
'my_timestamp': {'type': 'date'},
'my_varchar': {'type': 'string'},
},
},
},
)
|
|
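A sketch of feeding such a mapping to elasticsearch-py when creating an index; the client, index name, and one-column schema are assumptions:
from elasticsearch import Elasticsearch
from sqlalchemy import types as sql_types
from esis.es import Mapping
mapping = Mapping('my_table', {'my_text': sql_types.TEXT()})
es = Elasticsearch()
es.indices.create(index='my_table_index', body={'mappings': mapping.mapping})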
32f48a7f16fe5657ac02ea5c8521ecb068e4ac77
|
pontoon/base/migrations/0013_add_en_US.py
|
pontoon/base/migrations/0013_add_en_US.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_back_source_locales(apps, schema_editor):
Locale = apps.get_model('base', 'Locale')
Locale.objects.create(
code='en',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
Locale.objects.create(
code='en-US',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
class Migration(migrations.Migration):
dependencies = [
('base', '0012_auto_20150804_0859'),
]
operations = [
migrations.RunPython(add_back_source_locales)
]
|
Add en and en-US locales back to locale list.
|
Add en and en-US locales back to locale list.
Both are source locales and shouldn't have been removed.
|
Python
|
bsd-3-clause
|
mozilla/pontoon,Osmose/pontoon,mastizada/pontoon,mathjazz/pontoon,vivekanand1101/pontoon,sudheesh001/pontoon,mastizada/pontoon,participedia/pontoon,participedia/pontoon,jotes/pontoon,m8ttyB/pontoon,yfdyh000/pontoon,sudheesh001/pontoon,jotes/pontoon,vivekanand1101/pontoon,Osmose/pontoon,Osmose/pontoon,sudheesh001/pontoon,yfdyh000/pontoon,mastizada/pontoon,mathjazz/pontoon,yfdyh000/pontoon,m8ttyB/pontoon,m8ttyB/pontoon,participedia/pontoon,yfdyh000/pontoon,sudheesh001/pontoon,vivekanand1101/pontoon,participedia/pontoon,mathjazz/pontoon,mozilla/pontoon,mastizada/pontoon,mozilla/pontoon,m8ttyB/pontoon,mathjazz/pontoon,jotes/pontoon,jotes/pontoon,mathjazz/pontoon,vivekanand1101/pontoon,Osmose/pontoon,mozilla/pontoon,mozilla/pontoon
|
Add en and en-US locales back to locale list.
Both are source locales and shouldn't have been removed.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_back_source_locales(apps, schema_editor):
Locale = apps.get_model('base', 'Locale')
Locale.objects.create(
code='en',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
Locale.objects.create(
code='en-US',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
class Migration(migrations.Migration):
dependencies = [
('base', '0012_auto_20150804_0859'),
]
operations = [
migrations.RunPython(add_back_source_locales)
]
|
<commit_before><commit_msg>Add en and en-US locales back to locale list.
Both are source locales and shouldn't have been removed.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_back_source_locales(apps, schema_editor):
Locale = apps.get_model('base', 'Locale')
Locale.objects.create(
code='en',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
Locale.objects.create(
code='en-US',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
class Migration(migrations.Migration):
dependencies = [
('base', '0012_auto_20150804_0859'),
]
operations = [
migrations.RunPython(add_back_source_locales)
]
|
Add en and en-US locales back to locale list.
Both are source locales and shouldn't have been removed.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_back_source_locales(apps, schema_editor):
Locale = apps.get_model('base', 'Locale')
Locale.objects.create(
code='en',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
Locale.objects.create(
code='en-US',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
class Migration(migrations.Migration):
dependencies = [
('base', '0012_auto_20150804_0859'),
]
operations = [
migrations.RunPython(add_back_source_locales)
]
|
<commit_before><commit_msg>Add en and en-US locales back to locale list.
Both are source locales and shouldn't have been removed.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_back_source_locales(apps, schema_editor):
Locale = apps.get_model('base', 'Locale')
Locale.objects.create(
code='en',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
Locale.objects.create(
code='en-US',
name='English',
nplurals=2,
plural_rule='(n != 1)',
cldr_plurals='1,5'
)
class Migration(migrations.Migration):
dependencies = [
('base', '0012_auto_20150804_0859'),
]
operations = [
migrations.RunPython(add_back_source_locales)
]
|
|
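As written the migration cannot be rolled back; RunPython accepts a reverse function, so a sketch of making it reversible:
def remove_source_locales(apps, schema_editor):
    Locale = apps.get_model('base', 'Locale')
    Locale.objects.filter(code__in=['en', 'en-US']).delete()
operations = [
    migrations.RunPython(add_back_source_locales, remove_source_locales),
]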
82943db5942f6d0749776efb1b1acf928af97a1c
|
example.py
|
example.py
|
#!/usr/bin/env python3
import numpy as np
import rust_sorting as rs
array = np.zeros((5,), dtype=np.int8)
rs.quicksort(array)
print("Python done!")
|
Call quicksort from Python; it works!
|
Call quicksort from Python; it works!
|
Python
|
bsd-3-clause
|
nbigaouette/rust-sorting,nbigaouette/rust-sorting,nbigaouette/rust-sorting
|
Call quicksort from Python; it works!
|
#!/usr/bin/env python3
import numpy as np
import rust_sorting as rs
array = np.zeros((5,), dtype=np.int8)
rs.quicksort(array)
print("Python done!")
|
<commit_before><commit_msg>Call quicksort from Python; it works!<commit_after>
|
#!/usr/bin/env python3
import numpy as np
import rust_sorting as rs
array = np.zeros((5,), dtype=np.int8)
rs.quicksort(array)
print("Python done!")
|
Call quicksort from Python; it works!#!/usr/bin/env python3
import numpy as np
import rust_sorting as rs
array = np.zeros((5,), dtype=np.int8)
rs.quicksort(array)
print("Python done!")
|
<commit_before><commit_msg>Call quicksort from Python; it works!<commit_after>#!/usr/bin/env python3
import numpy as np
import rust_sorting as rs
array = np.zeros((5,), dtype=np.int8)
rs.quicksort(array)
print("Python done!")
|
|
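A slightly stronger check than the all-zeros array, assuming quicksort sorts the NumPy buffer in place as the example implies:
import numpy as np
import rust_sorting as rs
array = np.random.randint(-100, 100, size=50).astype(np.int8)
rs.quicksort(array)
assert all(array[i] <= array[i + 1] for i in range(len(array) - 1))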
b12dd992da93d43e5acf1d42cee0f7d34f8367bf
|
spam/tests/fake_params.py
|
spam/tests/fake_params.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
DATASET_PATH = os.path.join(
os.path.dirname(__file__),
'test_dataset',
)
DATASET_SUBDIRS = [
{
'name': 'enron1',
'total_count': 5,
'ham_count': 3,
'spam_count': 2,
'path': os.path.join(DATASET_PATH, 'enron1'),
'ham_path': os.path.join(
DATASET_PATH,
'enron1',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron1',
'spam'
),
},
{
'name': 'enron2',
'total_count': 6,
'ham_count': 2,
'spam_count': 4,
'path': os.path.join(DATASET_PATH, 'enron2'),
'ham_path': os.path.join(
DATASET_PATH,
'enron2',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron2',
'spam'
),
},
]
|
Add fake params from test.
|
Add fake params from test.
|
Python
|
mit
|
benigls/spam,benigls/spam
|
Add fake params from test.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
DATASET_PATH = os.path.join(
os.path.dirname(__file__),
'test_dataset',
)
DATASET_SUBDIRS = [
{
'name': 'enron1',
'total_count': 5,
'ham_count': 3,
'spam_count': 2,
'path': os.path.join(DATASET_PATH, 'enron1'),
'ham_path': os.path.join(
DATASET_PATH,
'enron1',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron1',
'spam'
),
},
{
'name': 'enron2',
'total_count': 6,
'ham_count': 2,
'spam_count': 4,
'path': os.path.join(DATASET_PATH, 'enron2'),
'ham_path': os.path.join(
DATASET_PATH,
'enron2',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron2',
'spam'
),
},
]
|
<commit_before><commit_msg>Add fake params from test.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
DATASET_PATH = os.path.join(
os.path.dirname(__file__),
'test_dataset',
)
DATASET_SUBDIRS = [
{
'name': 'enron1',
'total_count': 5,
'ham_count': 3,
'spam_count': 2,
'path': os.path.join(DATASET_PATH, 'enron1'),
'ham_path': os.path.join(
DATASET_PATH,
'enron1',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron1',
'spam'
),
},
{
'name': 'enron2',
'total_count': 6,
'ham_count': 2,
'spam_count': 4,
'path': os.path.join(DATASET_PATH, 'enron2'),
'ham_path': os.path.join(
DATASET_PATH,
'enron2',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron2',
'spam'
),
},
]
|
Add fake params from test.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
DATASET_PATH = os.path.join(
os.path.dirname(__file__),
'test_dataset',
)
DATASET_SUBDIRS = [
{
'name': 'enron1',
'total_count': 5,
'ham_count': 3,
'spam_count': 2,
'path': os.path.join(DATASET_PATH, 'enron1'),
'ham_path': os.path.join(
DATASET_PATH,
'enron1',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron1',
'spam'
),
},
{
'name': 'enron2',
'total_count': 6,
'ham_count': 2,
'spam_count': 4,
'path': os.path.join(DATASET_PATH, 'enron2'),
'ham_path': os.path.join(
DATASET_PATH,
'enron2',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron2',
'spam'
),
},
]
|
<commit_before><commit_msg>Add fake params from test.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
DATASET_PATH = os.path.join(
os.path.dirname(__file__),
'test_dataset',
)
DATASET_SUBDIRS = [
{
'name': 'enron1',
'total_count': 5,
'ham_count': 3,
'spam_count': 2,
'path': os.path.join(DATASET_PATH, 'enron1'),
'ham_path': os.path.join(
DATASET_PATH,
'enron1',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron1',
'spam'
),
},
{
'name': 'enron2',
'total_count': 6,
'ham_count': 2,
'spam_count': 4,
'path': os.path.join(DATASET_PATH, 'enron2'),
'ham_path': os.path.join(
DATASET_PATH,
'enron2',
'ham'
),
'spam_path': os.path.join(
DATASET_PATH,
'enron2',
'spam'
),
},
]
|
|
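A sketch of how a test might consume these fixtures, assuming the counts mirror the files actually on disk under test_dataset:
import os
from spam.tests.fake_params import DATASET_SUBDIRS
for subdir in DATASET_SUBDIRS:
    assert len(os.listdir(subdir['ham_path'])) == subdir['ham_count']
    assert len(os.listdir(subdir['spam_path'])) == subdir['spam_count']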
0cdf73f158e425db0ab65d70966b18ef9f4af1c6
|
test/test_services_view.py
|
test/test_services_view.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import set_app_config
from alignak_app.synthesis.services_view import ServicesView
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication
except ImportError:
from PyQt4.Qt import QApplication
class TestServicesView(unittest2.TestCase):
"""
    This file tests the ServicesView class.
"""
set_app_config()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
except:
pass
def test_display_services(self):
"""Inititalize ServicesView"""
under_test = ServicesView()
self.assertIsNotNone(under_test.layout)
under_test.display_services(None, 'name')
self.assertIsNotNone(under_test.layout)
|
Add unit tests for services_view.py
|
Add unit tests for services_view.py
|
Python
|
agpl-3.0
|
Alignak-monitoring-contrib/alignak-app,Alignak-monitoring-contrib/alignak-app
|
Add unit tests for services_view.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import set_app_config
from alignak_app.synthesis.services_view import ServicesView
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication
except ImportError:
from PyQt4.Qt import QApplication
class TestServicesView(unittest2.TestCase):
"""
This file tests the ServicesView class.
"""
set_app_config()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
except:
pass
def test_display_services(self):
"""Inititalize ServicesView"""
under_test = ServicesView()
self.assertIsNotNone(under_test.layout)
under_test.display_services(None, 'name')
self.assertIsNotNone(under_test.layout)
|
<commit_before><commit_msg>Add unit tests for services_view.py<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import set_app_config
from alignak_app.synthesis.services_view import ServicesView
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication
except ImportError:
from PyQt4.Qt import QApplication
class TestServicesView(unittest2.TestCase):
"""
This file tests the ServicesView class.
"""
set_app_config()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
except:
pass
def test_display_services(self):
"""Inititalize ServicesView"""
under_test = ServicesView()
self.assertIsNotNone(under_test.layout)
under_test.display_services(None, 'name')
self.assertIsNotNone(under_test.layout)
|
Add unit tests for services_view.py#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import set_app_config
from alignak_app.synthesis.services_view import ServicesView
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication
except ImportError:
from PyQt4.Qt import QApplication
class TestServicesView(unittest2.TestCase):
"""
This file tests the ServicesView class.
"""
set_app_config()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
except:
pass
def test_display_services(self):
"""Inititalize ServicesView"""
under_test = ServicesView()
self.assertIsNotNone(under_test.layout)
under_test.display_services(None, 'name')
self.assertIsNotNone(under_test.layout)
|
<commit_before><commit_msg>Add unit tests for services_view.py<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.core.utils import set_app_config
from alignak_app.synthesis.services_view import ServicesView
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication
except ImportError:
from PyQt4.Qt import QApplication
class TestServicesView(unittest2.TestCase):
"""
This file tests the ServicesView class.
"""
set_app_config()
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
except:
pass
def test_display_services(self):
"""Inititalize ServicesView"""
under_test = ServicesView()
self.assertIsNotNone(under_test.layout)
under_test.display_services(None, 'name')
self.assertIsNotNone(under_test.layout)
|
|
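The setUpClass in the record above silences the duplicate-QApplication error with a bare try/except; a sketch of the same guard made explicit (QApplication.instance() is available in both PyQt4 and PyQt5; the helper name is illustrative):

import sys
from PyQt5.QtWidgets import QApplication  # or PyQt4.Qt, as in the record

def get_qapplication():
    # instance() returns None when no QApplication has been created yet
    app = QApplication.instance()
    if app is None:
        app = QApplication(sys.argv)
    return app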
78e84abea9b0cc41bdb1aa8b9ea5c9953a37941d
|
bdbcontrib/contrib_exp.py
|
bdbcontrib/contrib_exp.py
|
from bayeslite.shell.hook import bayesdb_shell_cmd
from bdbcontrib.general_utils import ArgparseError, ArgumentParser
import bdbexp
experiments = {
'haystacks' : bdbexp.haystacks,
'hyperparams' : bdbexp.hyperparams,
'infer' : bdbexp.infer,
'kl_divergence' : bdbexp.kl_divergence,
'permute' : bdbexp.permute,
'predictive_pdf' : bdbexp.predictive_pdf,
'recover' : bdbexp.recover
}
@bayesdb_shell_cmd('experiment')
def experiment(self, argin):
'''
Launch an experimental inference quality test.
USAGE: .experiment <exp_name> [exp_args ...]
<exp_name>
permute kl_divergence
haystacks predictive_pdf
hyperparams recover
infer
[exp_args]
To see experiment specific arguments, use
.experiment <exp_name> --help
Examples:
bayeslite> .experiment predictive_pdf --help
bayeslite> .experiment haystacks --n_iter=200 --n_distractors=4
'''
expname = argin.split()[0] if argin != '' else argin
if expname not in experiments:
print 'Invalid experiment {}'.format(expname)
print 'For help use: .help experiment'
return
try:
experiments[argin.split()[0]].main(argin.split()[1:], halt=True)
except ArgparseError as e:
self.stdout.write('%s' % (e.message,))
return
except SystemExit:
return # This happens when --help is invoked.
|
Add hooks for inference experiments.
|
Add hooks for inference experiments.
|
Python
|
apache-2.0
|
probcomp/bdbcontrib,probcomp/bdbcontrib
|
Add hooks for inference experiments.
|
from bayeslite.shell.hook import bayesdb_shell_cmd
from bdbcontrib.general_utils import ArgparseError, ArgumentParser
import bdbexp
experiments = {
'haystacks' : bdbexp.haystacks,
'hyperparams' : bdbexp.hyperparams,
'infer' : bdbexp.infer,
'kl_divergence' : bdbexp.kl_divergence,
'permute' : bdbexp.permute,
'predictive_pdf' : bdbexp.predictive_pdf,
'recover' : bdbexp.recover
}
@bayesdb_shell_cmd('experiment')
def experiment(self, argin):
'''
Launch an experimental inference quality test.
USAGE: .experiment <exp_name> [exp_args ...]
<exp_name>
permute kl_divergence
haystacks predictive_pdf
hyperparams recover
infer
[exp_args]
To see experiment specific arguments, use
.experiment <exp_name> --help
Examples:
bayeslite> .experiment predictive_pdf --help
bayeslite> .experiment haystacks --n_iter=200 --n_distractors=4
'''
expname = argin.split()[0] if argin != '' else argin
if expname not in experiments:
print 'Invalid experiment {}'.format(expname)
print 'For help use: .help experiment'
return
try:
experiments[argin.split()[0]].main(argin.split()[1:], halt=True)
except ArgparseError as e:
self.stdout.write('%s' % (e.message,))
return
except SystemExit:
return # This happens when --help is invoked.
|
<commit_before><commit_msg>Add hooks for inference experiments.<commit_after>
|
from bayeslite.shell.hook import bayesdb_shell_cmd
from bdbcontrib.general_utils import ArgparseError, ArgumentParser
import bdbexp
experiments = {
'haystacks' : bdbexp.haystacks,
'hyperparams' : bdbexp.hyperparams,
'infer' : bdbexp.infer,
'kl_divergence' : bdbexp.kl_divergence,
'permute' : bdbexp.permute,
'predictive_pdf' : bdbexp.predictive_pdf,
'recover' : bdbexp.recover
}
@bayesdb_shell_cmd('experiment')
def experiment(self, argin):
'''
Launch an experimental inference quality test.
USAGE: .experiment <exp_name> [exp_args ...]
<exp_name>
permute kl_divergence
haystacks predictive_pdf
hyperparams recover
infer
[exp_args]
To see experiment specific arguments, use
.experiment <exp_name> --help
Examples:
bayeslite> .experiment predictive_pdf --help
bayeslite> .experiment haystacks --n_iter=200 --n_distractors=4
'''
expname = argin.split()[0] if argin != '' else argin
if expname not in experiments:
print 'Invalid experiment {}'.format(expname)
print 'For help use: .help experiment'
return
try:
experiments[argin.split()[0]].main(argin.split()[1:], halt=True)
except ArgparseError as e:
self.stdout.write('%s' % (e.message,))
return
except SystemExit:
return # This happens when --help is invoked.
|
Add hooks for inference experiments.from bayeslite.shell.hook import bayesdb_shell_cmd
from bdbcontrib.general_utils import ArgparseError, ArgumentParser
import bdbexp
experiments = {
'haystacks' : bdbexp.haystacks,
'hyperparams' : bdbexp.hyperparams,
'infer' : bdbexp.infer,
'kl_divergence' : bdbexp.kl_divergence,
'permute' : bdbexp.permute,
'predictive_pdf' : bdbexp.predictive_pdf,
'recover' : bdbexp.recover
}
@bayesdb_shell_cmd('experiment')
def experiment(self, argin):
'''
Launch an experimental inference quality test.
USAGE: .experiment <exp_name> [exp_args ...]
<exp_name>
permute kl_divergence
haystacks predictive_pdf
hyperparams recover
infer
[exp_args]
To see experiment specific arguments, use
.experiment <exp_name> --help
Examples:
bayeslite> .experiment predictive_pdf --help
bayeslite> .experiment haystacks --n_iter=200 --n_distractors=4
'''
expname = argin.split()[0] if argin != '' else argin
if expname not in experiments:
print 'Invalid experiment {}'.format(expname)
print 'For help use: .help experiment'
return
try:
experiments[argin.split()[0]].main(argin.split()[1:], halt=True)
except ArgparseError as e:
self.stdout.write('%s' % (e.message,))
return
except SystemExit:
return # This happens when --help is invoked.
|
<commit_before><commit_msg>Add hooks for inference experiments.<commit_after>from bayeslite.shell.hook import bayesdb_shell_cmd
from bdbcontrib.general_utils import ArgparseError, ArgumentParser
import bdbexp
experiments = {
'haystacks' : bdbexp.haystacks,
'hyperparams' : bdbexp.hyperparams,
'infer' : bdbexp.infer,
'kl_divergence' : bdbexp.kl_divergence,
'permute' : bdbexp.permute,
'predictive_pdf' : bdbexp.predictive_pdf,
'recover' : bdbexp.recover
}
@bayesdb_shell_cmd('experiment')
def experiment(self, argin):
'''
Launch an experimental inference quality test.
USAGE: .experiment <exp_name> [exp_args ...]
<exp_name>
permute kl_divergence
haystacks predictive_pdf
hyperparams recover
infer
[exp_args]
To see experiment specific arguments, use
.experiment <exp_name> --help
Examples:
bayeslite> .experiment predictive_pdf --help
bayeslite> .experiment haystacks --n_iter=200 --n_distractors=4
'''
expname = argin.split()[0] if argin != '' else argin
if expname not in experiments:
print 'Invalid experiment {}'.format(expname)
print 'For help use: .help experiment'
return
try:
experiments[argin.split()[0]].main(argin.split()[1:], halt=True)
except ArgparseError as e:
self.stdout.write('%s' % (e.message,))
return
except SystemExit:
return # This happens when --help is invoked.
|
|
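The dispatcher in the record above tokenizes argin twice, once for validation and once for dispatch; a behavior-preserving sketch that splits once, reusing the record's names (Python 2, matching the source):

tokens = argin.split()
expname = tokens[0] if tokens else ''
if expname not in experiments:
    print 'Invalid experiment {}'.format(expname)
    print 'For help use: .help experiment'
else:
    experiments[expname].main(tokens[1:], halt=True)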
c860d536710d2d9546924b00aef7de4f516e38a5
|
tools/fixup_io_tilegrid.py
|
tools/fixup_io_tilegrid.py
|
import database
import tiles
import json
from os import path
"""
Despite Lattice assigning them the same tile type, "odd" and "even" top/left/right IO
locations have slightly different routing - swapped output tristate and data
This script fixes this by patching tile names
"""
for f, d in [("LIFCL", "LIFCL-40")]:
tgp = path.join(database.get_db_root(), f, d, "tilegrid.json")
with open(tgp, "r") as infile:
tg = json.load(infile)["tiles"]
tiles_by_xy = [[]]
max_row = 0
max_col = 0
for tile in sorted(tg.keys()):
r, c = tiles.pos_from_name(tile)
max_row = max(r, max_row)
max_col = max(c, max_col)
while r >= len(tiles_by_xy):
tiles_by_xy.append([])
while c >= len(tiles_by_xy[r]):
tiles_by_xy[r].append([])
tiles_by_xy[r][c].append(tile)
# Top tiles
is_odd = False
for col in tiles_by_xy[0]:
for tile in col:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
# Don't rename special or already-renamed tiles
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
# Left/right tiles
for tc in (0, max_col):
is_odd = False
bank = ""
for row in tiles_by_xy:
for tile in row[tc]:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
if tt.endswith("REM"):
continue
tile_bank = tt[tt.find("B")+1]
if tile_bank != bank:
is_odd = False
bank = tile_bank
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
with open(tgp, "w") as outfile:
json.dump({"tiles": tg}, outfile, sort_keys=True, indent=4)
|
Add tool to fix IO tilegrid
|
Add tool to fix IO tilegrid
Signed-off-by: David Shah <bfcdf3e6ca6cef45543bfbb57509c92aec9a39fb@ds0.me>
|
Python
|
isc
|
gatecat/prjoxide,gatecat/prjoxide,gatecat/prjoxide
|
Add tool to fix IO tilegrid
Signed-off-by: David Shah <bfcdf3e6ca6cef45543bfbb57509c92aec9a39fb@ds0.me>
|
import database
import tiles
import json
from os import path
"""
Despite Lattice assigning them the same tile type, "odd" and "even" top/left/right IO
locations have slightly different routing - swapped output tristate and data
This script fixes this by patching tile names
"""
for f, d in [("LIFCL", "LIFCL-40")]:
tgp = path.join(database.get_db_root(), f, d, "tilegrid.json")
with open(tgp, "r") as infile:
tg = json.load(infile)["tiles"]
tiles_by_xy = [[]]
max_row = 0
max_col = 0
for tile in sorted(tg.keys()):
r, c = tiles.pos_from_name(tile)
max_row = max(r, max_row)
max_col = max(c, max_col)
while r >= len(tiles_by_xy):
tiles_by_xy.append([])
while c >= len(tiles_by_xy[r]):
tiles_by_xy[r].append([])
tiles_by_xy[r][c].append(tile)
# Top tiles
is_odd = False
for col in tiles_by_xy[0]:
for tile in col:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
# Don't rename special or already-renamed tiles
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
# Left/right tiles
for tc in (0, max_col):
is_odd = False
bank = ""
for row in tiles_by_xy:
for tile in row[tc]:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
if tt.endswith("REM"):
continue
tile_bank = tt[tt.find("B")+1]
if tile_bank != bank:
is_odd = False
bank = tile_bank
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
with open(tgp, "w") as outfile:
json.dump({"tiles": tg}, outfile, sort_keys=True, indent=4)
|
<commit_before><commit_msg>Add tool to fix IO tilegrid
Signed-off-by: David Shah <bfcdf3e6ca6cef45543bfbb57509c92aec9a39fb@ds0.me><commit_after>
|
import database
import tiles
import json
from os import path
"""
Despite Lattice assigning them the same tile type, "odd" and "even" top/left/right IO
locations have slightly different routing - swapped output tristate and data
This script fixes this by patching tile names
"""
for f, d in [("LIFCL", "LIFCL-40")]:
tgp = path.join(database.get_db_root(), f, d, "tilegrid.json")
with open(tgp, "r") as infile:
tg = json.load(infile)["tiles"]
tiles_by_xy = [[]]
max_row = 0
max_col = 0
for tile in sorted(tg.keys()):
r, c = tiles.pos_from_name(tile)
max_row = max(r, max_row)
max_col = max(c, max_col)
while r >= len(tiles_by_xy):
tiles_by_xy.append([])
while c >= len(tiles_by_xy[r]):
tiles_by_xy[r].append([])
tiles_by_xy[r][c].append(tile)
# Top tiles
is_odd = False
for col in tiles_by_xy[0]:
for tile in col:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
# Don't rename special or already-renamed tiles
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
# Left/right tiles
for tc in (0, max_col):
is_odd = False
bank = ""
for row in tiles_by_xy:
for tile in row[tc]:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
if tt.endswith("REM"):
continue
tile_bank = tt[tt.find("B")+1]
if tile_bank != bank:
is_odd = False
bank = tile_bank
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
with open(tgp, "w") as outfile:
json.dump({"tiles": tg}, outfile, sort_keys=True, indent=4)
|
Add tool to fix IO tilegrid
Signed-off-by: David Shah <bfcdf3e6ca6cef45543bfbb57509c92aec9a39fb@ds0.me>import database
import tiles
import json
from os import path
"""
Despite Lattice assigning them the same tile type, "odd" and "even" top/left/right IO
locations have slightly different routing - swapped output tristate and data
This script fixes this by patching tile names
"""
for f, d in [("LIFCL", "LIFCL-40")]:
tgp = path.join(database.get_db_root(), f, d, "tilegrid.json")
with open(tgp, "r") as infile:
tg = json.load(infile)["tiles"]
tiles_by_xy = [[]]
max_row = 0
max_col = 0
for tile in sorted(tg.keys()):
r, c = tiles.pos_from_name(tile)
max_row = max(r, max_row)
max_col = max(c, max_col)
while r >= len(tiles_by_xy):
tiles_by_xy.append([])
while c >= len(tiles_by_xy[r]):
tiles_by_xy[r].append([])
tiles_by_xy[r][c].append(tile)
# Top tiles
is_odd = False
for col in tiles_by_xy[0]:
for tile in col:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
# Don't rename special or already-renamed tiles
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
# Left/right tiles
for tc in (0, max_col):
is_odd = False
bank = ""
for row in tiles_by_xy:
for tile in row[tc]:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
if tt.endswith("REM"):
continue
tile_bank = tt[tt.find("B")+1]
if tile_bank != bank:
is_odd = False
bank = tile_bank
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
with open(tgp, "w") as outfile:
json.dump({"tiles": tg}, outfile, sort_keys=True, indent=4)
|
<commit_before><commit_msg>Add tool to fix IO tilegrid
Signed-off-by: David Shah <bfcdf3e6ca6cef45543bfbb57509c92aec9a39fb@ds0.me><commit_after>import database
import tiles
import json
from os import path
"""
Despite Lattice assigning them the same tile type, "odd" and "even" top/left/right IO
locations have slightly different routing - swapped output tristate and data
This script fixes this by patching tile names
"""
for f, d in [("LIFCL", "LIFCL-40")]:
tgp = path.join(database.get_db_root(), f, d, "tilegrid.json")
with open(tgp, "r") as infile:
tg = json.load(infile)["tiles"]
tiles_by_xy = [[]]
max_row = 0
max_col = 0
for tile in sorted(tg.keys()):
r, c = tiles.pos_from_name(tile)
max_row = max(r, max_row)
max_col = max(c, max_col)
while r >= len(tiles_by_xy):
tiles_by_xy.append([])
while c >= len(tiles_by_xy[r]):
tiles_by_xy[r].append([])
tiles_by_xy[r][c].append(tile)
# Top tiles
is_odd = False
for col in tiles_by_xy[0]:
for tile in col:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
# Don't rename special or already-renamed tiles
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
# Left/right tiles
for tc in (0, max_col):
is_odd = False
bank = ""
for row in tiles_by_xy:
for tile in row[tc]:
tt = tiles.type_from_fullname(tile)
if not tt.startswith("SYSIO"):
continue
if tt.endswith("REM"):
continue
tile_bank = tt[tt.find("B")+1]
if tile_bank != bank:
is_odd = False
bank = tile_bank
if tt[-1].isdigit():
new_name = tile + ("_ODD" if is_odd else "_EVEN")
assert new_name not in tg
tg[new_name] = dict(tg[tile])
tg[new_name]["tiletype"] = tg[new_name]["tiletype"] + ("_ODD" if is_odd else "_EVEN")
del tg[tile]
is_odd = not is_odd
with open(tgp, "w") as outfile:
json.dump({"tiles": tg}, outfile, sort_keys=True, indent=4)
|
|
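A toy run of the odd/even suffixing the script above applies; the tile names are made up, only the alternation mirrors the record:

is_odd = False
for name in ['SYSIO_B0_0', 'SYSIO_B0_1', 'SYSIO_B0_2']:
    print(name + ('_ODD' if is_odd else '_EVEN'))
    is_odd = not is_odd
# prints SYSIO_B0_0_EVEN, SYSIO_B0_1_ODD, SYSIO_B0_2_EVEN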
c0f06b64c15d74be26be2cd9e6d593e5c5cae2a9
|
tools/xml_select_minmax.py
|
tools/xml_select_minmax.py
|
#! /usr/bin/python3
import sys
import argparse
import xml_utils as u
import os
from argparse import RawTextHelpFormatter
##----------------------------------------------------------
## for each label that has more than the minimum count, select the
## largest subset less than the maximum count.
## writes out to a new xml file.
##----------------------------------------------------------
def main (argv) :
parser = argparse.ArgumentParser(description='Select a subset if label count falls between min and max.',
formatter_class=RawTextHelpFormatter)
parser.add_argument ('image_db')
parser.add_argument ('min', default=0)
parser.add_argument ('max', default=0)
parser.add_argument ('files', nargs='+')
parser.add_argument ('-o', '-out', '--output')
parser.add_argument ('-filetype', '--filetype', default="chips",
help='Type of input file <images|faces|chips>. Defaults to "chips".')
parser.add_argument ('-v', '--verbosity', type=int, default=1,
choices=[0, 1, 2, 3], help='')
# help="increase output verbosity"
u.set_argv (argv)
args = parser.parse_args()
u.set_verbosity (args.verbosity)
u.set_argv (argv)
u.set_filetype ('faces')
verbose = 0
if verbose > 0:
print("files: ", args.files)
filetypes = ['images', 'chips', 'faces']
filetype = args.filetype
if filetype not in filetypes :
print('unrecognized filetype :', filetype, 'should be one of:', filetypes)
return
output_file = 'selected_' + u.current_datetime () + '.xml'
if args.output : # user specified
if not os.path.exists (args.output) :
output_file = args.output
else :
print ('output file exists, writing to', output_file)
u.select_labels_minmax (args.files, args.image_db, args.min, args.max, output_file, filetype)
if __name__ == "__main__":
main (sys.argv)
|
Select a subset for each label. If the label image count is greater than min, return the greatest number of images less than or equal to the max. The images will be evenly distributed across years, seasons and days.
|
Select a subset for each label. If the label image count is greater
than min, return the greatest number of images less than or equal to the max.
The images will be evenly distributed across years, seasons and days.
|
Python
|
mit
|
hypraptive/bearid,hypraptive/bearid,hypraptive/bearid
|
Select a subset for each label. If the label image count is greater
than min, return the greatest number of images less than or equal to the max.
The images will be evenly distributed across years, seasons and days.
|
#! /usr/bin/python3
import sys
import argparse
import xml_utils as u
import os
from argparse import RawTextHelpFormatter
##----------------------------------------------------------
## for each label that has more than the minimum count, select the
## largest subset less than the maximum count.
## writes out to a new xml file.
##----------------------------------------------------------
def main (argv) :
parser = argparse.ArgumentParser(description='Select a subset if label count falls between min and max.',
formatter_class=RawTextHelpFormatter)
parser.add_argument ('image_db')
parser.add_argument ('min', default=0)
parser.add_argument ('max', default=0)
parser.add_argument ('files', nargs='+')
parser.add_argument ('-o', '-out', '--output')
parser.add_argument ('-filetype', '--filetype', default="chips",
help='Type of input file <images|faces|chips>. Defaults to "chips".')
parser.add_argument ('-v', '--verbosity', type=int, default=1,
choices=[0, 1, 2, 3], help='')
# help="increase output verbosity"
u.set_argv (argv)
args = parser.parse_args()
u.set_verbosity (args.verbosity)
u.set_argv (argv)
u.set_filetype ('faces')
verbose = 0
if verbose > 0:
print("files: ", args.files)
filetypes = ['images', 'chips', 'faces']
filetype = args.filetype
if filetype not in filetypes :
print('unrecognized filetype :', filetype, 'should be one of:', filetypes)
return
output_file = 'selected_' + u.current_datetime () + '.xml'
if args.output : # user specified
if not os.path.exists (args.output) :
output_file = args.output
else :
print ('output file exists, writing to', output_file)
u.select_labels_minmax (args.files, args.image_db, args.min, args.max, output_file, filetype)
if __name__ == "__main__":
main (sys.argv)
|
<commit_before><commit_msg>Select a subset for each label. If the label image count is greater
than min, return the greatest number of images less than or equal to the max.
The images will be evenly distributed across years, seasons and days.<commit_after>
|
#! /usr/bin/python3
import sys
import argparse
import xml_utils as u
import os
from argparse import RawTextHelpFormatter
##----------------------------------------------------------
## for each label that has more than the mininum count, select the
## largest subset less than the maxinum count.
## writes out to a new xml file.
##----------------------------------------------------------
def main (argv) :
parser = argparse.ArgumentParser(description='Select a subset if label count falls between min and max.',
formatter_class=RawTextHelpFormatter)
parser.add_argument ('image_db')
parser.add_argument ('min', default=0)
parser.add_argument ('max', default=0)
parser.add_argument ('files', nargs='+')
parser.add_argument ('-o', '-out', '--output')
parser.add_argument ('-filetype', '--filetype', default="chips",
help='Type of input file <images|faces|chips>. Defaults to "chips".')
parser.add_argument ('-v', '--verbosity', type=int, default=1,
choices=[0, 1, 2, 3], help='')
# help="increase output verbosity"
u.set_argv (argv)
args = parser.parse_args()
u.set_verbosity (args.verbosity)
u.set_argv (argv)
u.set_filetype ('faces')
verbose = 0
if verbose > 0:
print("files: ", args.files)
filetypes = ['images', 'chips', 'faces']
filetype = args.filetype
if filetype not in filetypes :
print('unrecognized filetype :', filetype, 'should be one of:', filetypes)
return
output_file = 'selected_' + u.current_datetime () + '.xml'
if args.output : # user specified
if not os.path.exists (args.output) :
output_file = args.output
else :
print ('output file exists, writing to', output_file)
u.select_labels_minmax (args.files, args.image_db, args.min, args.max, output_file, filetype)
if __name__ == "__main__":
main (sys.argv)
|
Select a subset for each label. If the label image count is greater
than min, return the greatest number of images less than or equal to the max.
The images will be evenly distributed across years, seasons and days.#! /usr/bin/python3
import sys
import argparse
import xml_utils as u
import os
from argparse import RawTextHelpFormatter
##----------------------------------------------------------
## for each label that has more than the minimum count, select the
## largest subset less than the maximum count.
## writes out to a new xml file.
##----------------------------------------------------------
def main (argv) :
parser = argparse.ArgumentParser(description='Select a subset if label count falls between min and max.',
formatter_class=RawTextHelpFormatter)
parser.add_argument ('image_db')
parser.add_argument ('min', default=0)
parser.add_argument ('max', default=0)
parser.add_argument ('files', nargs='+')
parser.add_argument ('-o', '-out', '--output')
parser.add_argument ('-filetype', '--filetype', default="chips",
help='Type of input file <images|faces|chips>. Defaults to "chips".')
parser.add_argument ('-v', '--verbosity', type=int, default=1,
choices=[0, 1, 2, 3], help='')
# help="increase output verbosity"
u.set_argv (argv)
args = parser.parse_args()
u.set_verbosity (args.verbosity)
u.set_argv (argv)
u.set_filetype ('faces')
verbose = 0
if verbose > 0:
print("files: ", args.files)
filetypes = ['images', 'chips', 'faces']
filetype = args.filetype
if filetype not in filetypes :
print('unrecognized filetype :', filetype, 'should be one of:', filetypes)
return
output_file = 'selected_' + u.current_datetime () + '.xml'
if args.output : # user specified
if not os.path.exists (args.output) :
output_file = args.output
else :
print ('output file exists, writing to', output_file)
u.select_labels_minmax (args.files, args.image_db, args.min, args.max, output_file, filetype)
if __name__ == "__main__":
main (sys.argv)
|
<commit_before><commit_msg>Select a subset for each label. If the label image count is greater
than min, return the greatest number of images less than or equal to the max.
The images will be evenly distributed across years, seasons and days.<commit_after>#! /usr/bin/python3
import sys
import argparse
import xml_utils as u
import os
from argparse import RawTextHelpFormatter
##----------------------------------------------------------
## for each label that has more than the minimum count, select the
## largest subset less than the maximum count.
## writes out to a new xml file.
##----------------------------------------------------------
def main (argv) :
parser = argparse.ArgumentParser(description='Select a subset if label count falls between min and max.',
formatter_class=RawTextHelpFormatter)
parser.add_argument ('image_db')
parser.add_argument ('min', default=0)
parser.add_argument ('max', default=0)
parser.add_argument ('files', nargs='+')
parser.add_argument ('-o', '-out', '--output')
parser.add_argument ('-filetype', '--filetype', default="chips",
help='Type of input file <images|faces|chips>. Defaults to "chips".')
parser.add_argument ('-v', '--verbosity', type=int, default=1,
choices=[0, 1, 2, 3], help='')
# help="increase output verbosity"
u.set_argv (argv)
args = parser.parse_args()
u.set_verbosity (args.verbosity)
u.set_argv (argv)
u.set_filetype ('faces')
verbose = 0
if verbose > 0:
print("files: ", args.files)
filetypes = ['images', 'chips', 'faces']
filetype = args.filetype
if filetype not in filetypes :
print('unrecognized filetype :', filetype, 'should be one of:', filetypes)
return
output_file = 'selected_' + u.current_datetime () + '.xml'
if args.output : # user specified
if not os.path.exists (args.output) :
output_file = args.output
else :
print ('output file exists, writing to', output_file)
u.select_labels_minmax (args.files, args.image_db, args.min, args.max, output_file, filetype)
if __name__ == "__main__":
main (sys.argv)
|
|
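One caveat in the record's parser: the min and max positionals carry no type, so they reach select_labels_minmax as strings, and default=0 is inert because argparse never applies a default to a required positional. A sketch of the numeric declaration (same argument names, illustrative):

parser.add_argument ('min', type=int)  # required positionals always receive a value
parser.add_argument ('max', type=int)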
5e1c8f5bbe3cd8927b60a08b2784f7d2ea8263f2
|
src/backend/live_classify_local_camera.py
|
src/backend/live_classify_local_camera.py
|
import openface
import numpy as np
import os
import cv2
import pickle
id_name = ["Alec", "Greg", "Phong", "Emil"]
def classify(aligned_face, net, clf, le):
rep = net.forward(aligned_face)
predictions = clf.predict_proba(rep.reshape((1, len(rep)))).ravel()
maxI = np.argmax(predictions)
person = le.inverse_transform(maxI)
confidence = predictions[maxI]
print("Predict {} with {:.2f} confidence.".format(person, confidence))
return person
# path to the face alignment model
dLib_predictor = "../../resource/shaqpe_predictor_68_face_landmarks.dat"
# construct the face alignment model
align = openface.AlignDlib(dLib_predictor)
# path to deep neural network for feature representation
network_model = "../../resource/nn4.small2.v1.t7"
# construct the network for feature representation
net = openface.TorchNeuralNet(network_model, 96)
classifier_model = "../../resource/svm.pkl"
with open(classifier_model, 'r') as f:
(le, clf) = pickle.load(f)
print("Successfully loaded SVM model")
video = cv2.VideoCapture(0)
if not video.isOpened():
exit()
while True:
# grab image
ret, cameraFrame = video.read()
if not ret:
exit()
try:
bbs = align.getAllFaceBoundingBoxes(cameraFrame)
print("Found {} face".format(len(bbs)))
for bb2 in bbs:
alignedFace = align.align(96, cameraFrame, bb2,
landmarkIndices=openface.AlignDlib.OUTER_EYES_AND_NOSE)
id = classify(alignedFace, net, clf, le)
person_name = id_name[id]
print(person_name)
rectColor = (0,255, 0)
textColor = (255, 0, 0)
face_top_left = (bb2.left(), bb2.top())
face_bottom_right = (bb2.right(), bb2.bottom())
cv2.rectangle(cameraFrame, face_top_left, face_bottom_right, rectColor)
cv2.putText(cameraFrame, person_name, face_top_left,
fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=textColor, thickness=2)
cv2.imshow('FaceRecognizer', cameraFrame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
except:
cv2.imshow('FaceRecognizer', cameraFrame)
continue
|
Use the model to classify face in local camera.
|
Use the model to classify face in local camera.
|
Python
|
apache-2.0
|
xphongvn/smart-attendance-system-ta,xphongvn/smart-attendance-system-ta,xphongvn/smart-attendance-system-ta
|
Use the model to classify face in local camera.
|
import openface
import numpy as np
import os
import cv2
import pickle
id_name = ["Alec", "Greg", "Phong", "Emil"]
def classify(aligned_face, net, clf, le):
rep = net.forward(aligned_face)
predictions = clf.predict_proba(rep.reshape((1, len(rep)))).ravel()
maxI = np.argmax(predictions)
person = le.inverse_transform(maxI)
confidence = predictions[maxI]
print("Predict {} with {:.2f} confidence.".format(person, confidence))
return person
# path to the face alignment model
dLib_predictor = "../../resource/shaqpe_predictor_68_face_landmarks.dat"
# construct the face alignment model
align = openface.AlignDlib(dLib_predictor)
# path to deep neural network for feature representation
network_model = "../../resource/nn4.small2.v1.t7"
# construct the network for feature representation
net = openface.TorchNeuralNet(network_model, 96)
classifier_model = "../../resource/svm.pkl"
with open(classifier_model, 'r') as f:
(le, clf) = pickle.load(f)
print("Successfully loaded SVM model")
video = cv2.VideoCapture(0)
if not video.isOpened():
exit()
while True:
# grab image
ret, cameraFrame = video.read()
if not ret:
exit()
try:
bbs = align.getAllFaceBoundingBoxes(cameraFrame)
print("Found {} face".format(len(bbs)))
for bb2 in bbs:
alignedFace = align.align(96, cameraFrame, bb2,
landmarkIndices=openface.AlignDlib.OUTER_EYES_AND_NOSE)
id = classify(alignedFace, net, clf, le)
person_name = id_name[id]
print(person_name)
rectColor = (0,255, 0)
textColor = (255, 0, 0)
face_top_left = (bb2.left(), bb2.top())
face_bottom_right = (bb2.right(), bb2.bottom())
cv2.rectangle(cameraFrame, face_top_left, face_bottom_right, rectColor)
cv2.putText(cameraFrame, person_name, face_top_left,
fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=textColor, thickness=2)
cv2.imshow('FaceRecognizer', cameraFrame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
except:
cv2.imshow('FaceRecognizer', cameraFrame)
continue
|
<commit_before><commit_msg>Use the model to classify face in local camera.<commit_after>
|
import openface
import numpy as np
import os
import cv2
import pickle
id_name = ["Alec", "Greg", "Phong", "Emil"]
def classify(aligned_face, net, clf, le):
rep = net.forward(aligned_face)
predictions = clf.predict_proba(rep.reshape((1, len(rep)))).ravel()
maxI = np.argmax(predictions)
person = le.inverse_transform(maxI)
confidence = predictions[maxI]
print("Predict {} with {:.2f} confidence.".format(person, confidence))
return person
# path to the face alignment model
dLib_predictor = "../../resource/shaqpe_predictor_68_face_landmarks.dat"
# construct the face alignment model
align = openface.AlignDlib(dLib_predictor)
# path to deep neural network for feature representation
network_model = "../../resource/nn4.small2.v1.t7"
# construct the network for feature representation
net = openface.TorchNeuralNet(network_model, 96)
classifier_model = "../../resource/svm.pkl"
with open(classifier_model, 'r') as f:
(le, clf) = pickle.load(f)
print("Successfully loaded SVM model")
video = cv2.VideoCapture(0)
if not video.isOpened():
exit()
while True:
# grab image
ret, cameraFrame = video.read()
if not ret:
exit()
try:
bbs = align.getAllFaceBoundingBoxes(cameraFrame)
print("Found {} face".format(len(bbs)))
for bb2 in bbs:
alignedFace = align.align(96, cameraFrame, bb2,
landmarkIndices=openface.AlignDlib.OUTER_EYES_AND_NOSE)
id = classify(alignedFace, net, clf, le)
person_name = id_name[id]
print(person_name)
rectColor = (0,255, 0)
textColor = (255, 0, 0)
face_top_left = (bb2.left(), bb2.top())
face_bottom_right = (bb2.right(), bb2.bottom())
cv2.rectangle(cameraFrame, face_top_left, face_bottom_right, rectColor)
cv2.putText(cameraFrame, person_name, face_top_left,
fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=textColor, thickness=2)
cv2.imshow('FaceRecognizer', cameraFrame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
except:
cv2.imshow('FaceRecognizer', cameraFrame)
continue
|
Use the model to classify face in local camera.import openface
import numpy as np
import os
import cv2
import pickle
id_name = ["Alec", "Greg", "Phong", "Emil"]
def classify(aligned_face, net, clf, le):
rep = net.forward(aligned_face)
predictions = clf.predict_proba(rep.reshape((1, len(rep)))).ravel()
maxI = np.argmax(predictions)
person = le.inverse_transform(maxI)
confidence = predictions[maxI]
print("Predict {} with {:.2f} confidence.".format(person, confidence))
return person
# path to the face alignment model
dLib_predictor = "../../resource/shaqpe_predictor_68_face_landmarks.dat"
# construct the face alignment model
align = openface.AlignDlib(dLib_predictor)
# path to deep neural network for feature representation
network_model = "../../resource/nn4.small2.v1.t7"
# construct the network for feature representation
net = openface.TorchNeuralNet(network_model, 96)
classifier_model = "../../resource/svm.pkl"
with open(classifier_model, 'r') as f:
(le, clf) = pickle.load(f)
print("Successfully loaded SVM model")
video = cv2.VideoCapture(0)
if not video.isOpened():
exit()
while True:
# grab image
ret, cameraFrame = video.read()
if not ret:
exit()
try:
bbs = align.getAllFaceBoundingBoxes(cameraFrame)
print("Found {} face".format(len(bbs)))
for bb2 in bbs:
alignedFace = align.align(96, cameraFrame, bb2,
landmarkIndices=openface.AlignDlib.OUTER_EYES_AND_NOSE)
id = classify(alignedFace, net, clf, le)
person_name = id_name[id]
print(person_name)
rectColor = (0,255, 0)
textColor = (255, 0, 0)
face_top_left = (bb2.left(), bb2.top())
face_bottom_right = (bb2.right(), bb2.bottom())
cv2.rectangle(cameraFrame, face_top_left, face_bottom_right, rectColor)
cv2.putText(cameraFrame, person_name, face_top_left,
fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=textColor, thickness=2)
cv2.imshow('FaceRecognizer', cameraFrame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
except:
cv2.imshow('FaceRecognizer', cameraFrame)
continue
|
<commit_before><commit_msg>Use the model to classify face in local camera.<commit_after>import openface
import numpy as np
import os
import cv2
import pickle
id_name = ["Alec", "Greg", "Phong", "Emil"]
def classify(aligned_face, net, clf, le):
rep = net.forward(aligned_face)
predictions = clf.predict_proba(rep.reshape((1, len(rep)))).ravel()
maxI = np.argmax(predictions)
person = le.inverse_transform(maxI)
confidence = predictions[maxI]
print("Predict {} with {:.2f} confidence.".format(person, confidence))
return person
# path to the face alignment model
dLib_predictor = "../../resource/shaqpe_predictor_68_face_landmarks.dat"
# construct the face alignment model
align = openface.AlignDlib(dLib_predictor)
# path to deep neural network for feature representation
network_model = "../../resource/nn4.small2.v1.t7"
# construct the network for feature representation
net = openface.TorchNeuralNet(network_model, 96)
classifier_model = "../../resource/svm.pkl"
with open(classifier_model, 'r') as f:
(le, clf) = pickle.load(f)
print("Successfully loaded SVM model")
video = cv2.VideoCapture(0)
if not video.isOpened():
exit()
while True:
# grab image
ret, cameraFrame = video.read()
if not ret:
exit()
try:
bbs = align.getAllFaceBoundingBoxes(cameraFrame)
print("Found {} face".format(len(bbs)))
for bb2 in bbs:
alignedFace = align.align(96, cameraFrame, bb2,
landmarkIndices=openface.AlignDlib.OUTER_EYES_AND_NOSE)
id = classify(alignedFace, net, clf, le)
person_name = id_name[id]
print(person_name)
rectColor = (0,255, 0)
textColor = (255, 0, 0)
face_top_left = (bb2.left(), bb2.top())
face_bottom_right = (bb2.right(), bb2.bottom())
cv2.rectangle(cameraFrame, face_top_left, face_bottom_right, rectColor)
cv2.putText(cameraFrame, person_name, face_top_left,
fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=textColor, thickness=2)
cv2.imshow('FaceRecognizer', cameraFrame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
except:
cv2.imshow('FaceRecognizer', cameraFrame)
continue
|
|
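In the record above, classify returns the decoded label while its caller indexes id_name with the result, which only holds together if the encoder stored the integers 0-3 as labels. A sketch that makes the index/confidence contract explicit (assumes scikit-learn-style predict_proba, as the pickled clf suggests):

import numpy as np

def classify(aligned_face, net, clf, le):
    rep = net.forward(aligned_face)
    probs = clf.predict_proba(rep.reshape((1, len(rep)))).ravel()
    max_i = int(np.argmax(probs))
    # return the class index (safe to use with id_name) and its confidence
    return max_i, float(probs[max_i])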
d2c83bf007a36a47754ec862d592c7c97b3145b9
|
plugins/callbacks/timer.py
|
plugins/callbacks/timer.py
|
import os
import datetime
from datetime import datetime, timedelta
class CallbackModule(object):
"""
This callback module tells you how long your plays ran for.
"""
start_time = datetime.now()
def __init__(self):
self.start_time = datetime.now()
print "Timer plugin is active."
def days_hours_minutes_seconds(self, timedelta):
minutes = (timedelta.seconds//60)%60
r_seconds = timedelta.seconds % 60
return timedelta.days, timedelta.seconds//3600, minutes, r_seconds
def playbook_on_stats(self, stats):
end_time = datetime.now()
timedelta = end_time - self.start_time
print "Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(timedelta))
|
Add simple plugin that times ansible-playbook runs.
|
Add simple plugin that times ansible-playbook runs.
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
Add simple plugin that times ansible-playbook runs.
|
import os
import datetime
from datetime import datetime, timedelta
class CallbackModule(object):
"""
This callback module tells you how long your plays ran for.
"""
start_time = datetime.now()
def __init__(self):
self.start_time = datetime.now()
print "Timer plugin is active."
def days_hours_minutes_seconds(self, timedelta):
minutes = (timedelta.seconds//60)%60
r_seconds = timedelta.seconds % 60
return timedelta.days, timedelta.seconds//3600, minutes, r_seconds
def playbook_on_stats(self, stats):
end_time = datetime.now()
timedelta = end_time - self.start_time
print "Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(timedelta))
|
<commit_before><commit_msg>Add simple plugin that times ansible-playbook runs.<commit_after>
|
import os
import datetime
from datetime import datetime, timedelta
class CallbackModule(object):
"""
This callback module tells you how long your plays ran for.
"""
start_time = datetime.now()
def __init__(self):
self.start_time = datetime.now()
print "Timer plugin is active."
def days_hours_minutes_seconds(self, timedelta):
minutes = (timedelta.seconds//60)%60
r_seconds = timedelta.seconds % 60
return timedelta.days, timedelta.seconds//3600, minutes, r_seconds
def playbook_on_stats(self, stats):
end_time = datetime.now()
timedelta = end_time - self.start_time
print "Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(timedelta))
|
Add simple plugin that times ansible-playbook runs.import os
import datetime
from datetime import datetime, timedelta
class CallbackModule(object):
"""
This callback module tells you how long your plays ran for.
"""
start_time = datetime.now()
def __init__(self):
self.start_time = datetime.now()
print "Timer plugin is active."
def days_hours_minutes_seconds(self, timedelta):
minutes = (timedelta.seconds//60)%60
r_seconds = timedelta.seconds % 60
return timedelta.days, timedelta.seconds//3600, minutes, r_seconds
def playbook_on_stats(self, stats):
end_time = datetime.now()
timedelta = end_time - self.start_time
print "Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(timedelta))
|
<commit_before><commit_msg>Add simple plugin that times ansible-playbook runs.<commit_after>import os
import datetime
from datetime import datetime, timedelta
class CallbackModule(object):
"""
This callback module tells you how long your plays ran for.
"""
start_time = datetime.now()
def __init__(self):
self.start_time = datetime.now()
print "Timer plugin is active."
def days_hours_minutes_seconds(self, timedelta):
minutes = (timedelta.seconds//60)%60
r_seconds = timedelta.seconds % 60
return timedelta.days, timedelta.seconds//3600, minutes, r_seconds
def playbook_on_stats(self, stats):
end_time = datetime.now()
timedelta = end_time - self.start_time
print "Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(timedelta))
|
|
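The decomposition in days_hours_minutes_seconds can be written with divmod, which makes each carry explicit; an equivalent sketch (delta avoids shadowing the imported timedelta):

def split_timedelta(delta):
    minutes, seconds = divmod(delta.seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return delta.days, hours, minutes, seconds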
f4e0254eada3a6dd3aaa794926c5cc82e993b180
|
setup/bin/swc-nano-installer.py
|
setup/bin/swc-nano-installer.py
|
#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
Add a Nano installer for Windows
|
Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor
|
Python
|
bsd-2-clause
|
selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest
|
Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor
|
#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor<commit_after>
|
#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor<commit_after>#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
|
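make_posix_path in the record only rewrites the C: drive; a sketch that generalizes the drive-letter rule (illustrative, not part of the commit):

def make_posix_path(windows_path):
    # convert a Windows path to a posix path, handling any drive letter
    path = windows_path.replace('\\', '/')
    if len(path) > 1 and path[1] == ':':
        path = '/' + path[0].lower() + path[2:]
    return path

# make_posix_path('D:\\Users\\swc') -> '/d/Users/swc'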
910793402dccc02c853618f93917c5f42c42a3da
|
GatewayMonitor/GatewayMonitorService.py
|
GatewayMonitor/GatewayMonitorService.py
|
#
# A sample service to be 'compiled' into an exe-file with py2exe.
#
# See also
# setup.py - the distutils' setup script
# setup.cfg - the distutils' config file for this
# README.txt - detailed usage notes
#
# A minimal service, doing nothing else than
# - write 'start' and 'stop' entries into the NT event log
# - when started, waits to be stopped again.
#
import win32serviceutil
import win32service
import win32event
import win32evtlogutil
import gatewaymonitor.GatewayMonitor
class GatewayMonitorService(win32serviceutil.ServiceFramework):
_svc_name_ = "GatewayMonitorService"
_svc_display_name_ = "AMMO Gateway Monitor Service"
_svc_deps_ = ["EventLog"]
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
win32event.SetEvent(self.hWaitStop)
def SvcDoRun(self):
import servicemanager
mon = gatewaymonitor.GatewayMonitor.GatewayMonitor()
mon.start()
# Write a 'started' event to the event log...
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STARTED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
# wait for being stopped...
win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
mon.cancel()
mon.join()
# and write a 'stopped' event to the event log.
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STOPPED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
if __name__ == '__main__':
# Note that this code will not be run in the 'frozen' exe-file!!!
win32serviceutil.HandleCommandLine(GatewayMonitorService)
|
Add gateway monitor Windows service
|
Add gateway monitor Windows service
|
Python
|
mit
|
isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway,isis-ammo/ammo-gateway
|
Add gateway monitor Windows service
|
#
# A sample service to be 'compiled' into an exe-file with py2exe.
#
# See also
# setup.py - the distutils' setup script
# setup.cfg - the distutils' config file for this
# README.txt - detailed usage notes
#
# A minimal service, doing nothing else than
# - write 'start' and 'stop' entries into the NT event log
# - when started, waits to be stopped again.
#
import win32serviceutil
import win32service
import win32event
import win32evtlogutil
import gatewaymonitor.GatewayMonitor
class GatewayMonitorService(win32serviceutil.ServiceFramework):
_svc_name_ = "GatewayMonitorService"
_svc_display_name_ = "AMMO Gateway Monitor Service"
_svc_deps_ = ["EventLog"]
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
win32event.SetEvent(self.hWaitStop)
def SvcDoRun(self):
import servicemanager
mon = gatewaymonitor.GatewayMonitor.GatewayMonitor()
mon.start()
# Write a 'started' event to the event log...
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STARTED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
# wait for being stopped...
win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
mon.cancel()
mon.join()
# and write a 'stopped' event to the event log.
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STOPPED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
if __name__ == '__main__':
# Note that this code will not be run in the 'frozen' exe-file!!!
win32serviceutil.HandleCommandLine(GatewayMonitorService)
|
<commit_before><commit_msg>Add gateway monitor Windows service<commit_after>
|
#
# A sample service to be 'compiled' into an exe-file with py2exe.
#
# See also
# setup.py - the distutils' setup script
# setup.cfg - the distutils' config file for this
# README.txt - detailed usage notes
#
# A minimal service, doing nothing else than
# - write 'start' and 'stop' entries into the NT event log
# - when started, waits to be stopped again.
#
import win32serviceutil
import win32service
import win32event
import win32evtlogutil
import gatewaymonitor.GatewayMonitor
class GatewayMonitorService(win32serviceutil.ServiceFramework):
_svc_name_ = "GatewayMonitorService"
_svc_display_name_ = "AMMO Gateway Monitor Service"
_svc_deps_ = ["EventLog"]
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
win32event.SetEvent(self.hWaitStop)
def SvcDoRun(self):
import servicemanager
mon = gatewaymonitor.GatewayMonitor.GatewayMonitor()
mon.start()
# Write a 'started' event to the event log...
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STARTED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
# wait for being stopped...
win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
mon.cancel()
mon.join()
# and write a 'stopped' event to the event log.
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STOPPED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
if __name__ == '__main__':
# Note that this code will not be run in the 'frozen' exe-file!!!
win32serviceutil.HandleCommandLine(GatewayMonitorService)
|
Add gateway monitor Windows service#
# A sample service to be 'compiled' into an exe-file with py2exe.
#
# See also
# setup.py - the distutils' setup script
# setup.cfg - the distutils' config file for this
# README.txt - detailed usage notes
#
# A minimal service, doing nothing else than
# - write 'start' and 'stop' entries into the NT event log
# - when started, waits to be stopped again.
#
import win32serviceutil
import win32service
import win32event
import win32evtlogutil
import gatewaymonitor.GatewayMonitor
class GatewayMonitorService(win32serviceutil.ServiceFramework):
_svc_name_ = "GatewayMonitorService"
_svc_display_name_ = "AMMO Gateway Monitor Service"
_svc_deps_ = ["EventLog"]
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
win32event.SetEvent(self.hWaitStop)
def SvcDoRun(self):
import servicemanager
mon = gatewaymonitor.GatewayMonitor.GatewayMonitor()
mon.start()
# Write a 'started' event to the event log...
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STARTED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
# wait for being stopped...
win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
mon.cancel()
mon.join()
# and write a 'stopped' event to the event log.
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STOPPED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
if __name__ == '__main__':
# Note that this code will not be run in the 'frozen' exe-file!!!
win32serviceutil.HandleCommandLine(GatewayMonitorService)
|
<commit_before><commit_msg>Add gateway monitor Windows service<commit_after>#
# A sample service to be 'compiled' into an exe-file with py2exe.
#
# See also
# setup.py - the distutils' setup script
# setup.cfg - the distutils' config file for this
# README.txt - detailed usage notes
#
# A minimal service, doing nothing else than
# - write 'start' and 'stop' entries into the NT event log
# - when started, waits to be stopped again.
#
import win32serviceutil
import win32service
import win32event
import win32evtlogutil
import gatewaymonitor.GatewayMonitor
class GatewayMonitorService(win32serviceutil.ServiceFramework):
_svc_name_ = "GatewayMonitorService"
_svc_display_name_ = "AMMO Gateway Monitor Service"
_svc_deps_ = ["EventLog"]
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
win32event.SetEvent(self.hWaitStop)
def SvcDoRun(self):
import servicemanager
mon = gatewaymonitor.GatewayMonitor.GatewayMonitor()
mon.start()
# Write a 'started' event to the event log...
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STARTED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
# wait for being stopped...
win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
mon.cancel()
mon.join()
# and write a 'stopped' event to the event log.
win32evtlogutil.ReportEvent(self._svc_name_,
servicemanager.PYS_SERVICE_STOPPED,
0, # category
servicemanager.EVENTLOG_INFORMATION_TYPE,
(self._svc_name_, ''))
if __name__ == '__main__':
# Note that this code will not be run in the 'frozen' exe-file!!!
win32serviceutil.HandleCommandLine(GatewayMonitorService)
|
|
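The gatewaymonitor.GatewayMonitor class imported by the service above is not included in this record; from the calls made on it (start(), cancel(), join()) it is presumably a thread-like worker. A minimal sketch of a compatible class, assuming a simple poll loop; the interval and the health check are placeholders:

import threading

class GatewayMonitor(threading.Thread):
    """Worker exposing the start/cancel/join interface the service expects."""

    def __init__(self, poll_interval=5.0):
        super(GatewayMonitor, self).__init__()
        self._stop_event = threading.Event()
        self._poll_interval = poll_interval

    def run(self):
        # Poll until cancel() is called from SvcStop.
        while not self._stop_event.is_set():
            # ... check gateway health and report here ...
            self._stop_event.wait(self._poll_interval)

    def cancel(self):
        self._stop_event.set()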
c73b8a7503f21e16171ea1b0b40180bd1624f4d3
|
social/apps/flask_app/routes.py
|
social/apps/flask_app/routes.py
|
from flask import g, Blueprint
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=lambda strat, user: login_user(user),
user=g.user, *args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
|
from flask import g, Blueprint, request
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=do_login, user=g.user,
*args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
def do_login(strategy, user):
return login_user(user, remember=request.cookies.get('remember') or
request.args.get('remember') or
request.form.get('remember') or False)
|
Support remember flag when calling login on flask app
|
Support remember flag when calling login on flask app
|
Python
|
bsd-3-clause
|
lamby/python-social-auth,lneoe/python-social-auth,henocdz/python-social-auth,ononeor12/python-social-auth,cjltsod/python-social-auth,JJediny/python-social-auth,henocdz/python-social-auth,henocdz/python-social-auth,ariestiyansyah/python-social-auth,mathspace/python-social-auth,rsteca/python-social-auth,jneves/python-social-auth,tutumcloud/python-social-auth,alrusdi/python-social-auth,mrwags/python-social-auth,tkajtoch/python-social-auth,barseghyanartur/python-social-auth,san-mate/python-social-auth,yprez/python-social-auth,S01780/python-social-auth,python-social-auth/social-core,frankier/python-social-auth,ByteInternet/python-social-auth,jeyraof/python-social-auth,joelstanner/python-social-auth,rsalmaso/python-social-auth,webjunkie/python-social-auth,bjorand/python-social-auth,tutumcloud/python-social-auth,jameslittle/python-social-auth,Andygmb/python-social-auth,S01780/python-social-auth,iruga090/python-social-auth,lawrence34/python-social-auth,robbiet480/python-social-auth,degs098/python-social-auth,cjltsod/python-social-auth,DhiaEddineSaidi/python-social-auth,MSOpenTech/python-social-auth,merutak/python-social-auth,mchdks/python-social-auth,tkajtoch/python-social-auth,rsalmaso/python-social-auth,hsr-ba-fs15-dat/python-social-auth,degs098/python-social-auth,ByteInternet/python-social-auth,jeyraof/python-social-auth,degs098/python-social-auth,hsr-ba-fs15-dat/python-social-auth,frankier/python-social-auth,yprez/python-social-auth,SeanHayes/python-social-auth,alrusdi/python-social-auth,fearlessspider/python-social-auth,drxos/python-social-auth,joelstanner/python-social-auth,python-social-auth/social-app-django,JJediny/python-social-auth,jameslittle/python-social-auth,contracode/python-social-auth,wildtetris/python-social-auth,michael-borisov/python-social-auth,hsr-ba-fs15-dat/python-social-auth,imsparsh/python-social-auth,webjunkie/python-social-auth,contracode/python-social-auth,nirmalvp/python-social-auth,chandolia/python-social-auth,duoduo369/python-social-auth,falcon1kr/python-social-auth,SeanHayes/python-social-auth,clef/python-social-auth,mrwags/python-social-auth,imsparsh/python-social-auth,joelstanner/python-social-auth,falcon1kr/python-social-auth,mathspace/python-social-auth,robbiet480/python-social-auth,ariestiyansyah/python-social-auth,iruga090/python-social-auth,mark-adams/python-social-auth,chandolia/python-social-auth,mrwags/python-social-auth,cmichal/python-social-auth,lneoe/python-social-auth,python-social-auth/social-app-django,garrett-schlesinger/python-social-auth,ByteInternet/python-social-auth,JJediny/python-social-auth,JerzySpendel/python-social-auth,cmichal/python-social-auth,drxos/python-social-auth,yprez/python-social-auth,fearlessspider/python-social-auth,contracode/python-social-auth,jneves/python-social-auth,MSOpenTech/python-social-auth,nirmalvp/python-social-auth,tkajtoch/python-social-auth,python-social-auth/social-app-cherrypy,mark-adams/python-social-auth,mathspace/python-social-auth,muhammad-ammar/python-social-auth,barseghyanartur/python-social-auth,lawrence34/python-social-auth,S01780/python-social-auth,msampathkumar/python-social-auth,Andygmb/python-social-auth,python-social-auth/social-app-django,noodle-learns-programming/python-social-auth,mark-adams/python-social-auth,merutak/python-social-auth,san-mate/python-social-auth,daniula/python-social-auth,bjorand/python-social-auth,ononeor12/python-social-auth,daniula/python-social-auth,clef/python-social-auth,drxos/python-social-auth,robbiet480/python-social-auth,jeyraof/python-social-auth,falcon1kr/python-s
lamby/python-social-auth,lneoe/python-social-auth,henocdz/python-social-auth,ononeor12/python-social-auth,cjltsod/python-social-auth,JJediny/python-social-auth,henocdz/python-social-auth,henocdz/python-social-auth,ariestiyansyah/python-social-auth,mathspace/python-social-auth,rsteca/python-social-auth,jneves/python-social-auth,tutumcloud/python-social-auth,alrusdi/python-social-auth,mrwags/python-social-auth,tkajtoch/python-social-auth,barseghyanartur/python-social-auth,san-mate/python-social-auth,yprez/python-social-auth,S01780/python-social-auth,python-social-auth/social-core,frankier/python-social-auth,ByteInternet/python-social-auth,jeyraof/python-social-auth,joelstanner/python-social-auth,rsalmaso/python-social-auth,webjunkie/python-social-auth,bjorand/python-social-auth,tutumcloud/python-social-auth,jameslittle/python-social-auth,Andygmb/python-social-auth,S01780/python-social-auth,iruga090/python-social-auth,lawrence34/python-social-auth,robbiet480/python-social-auth,degs098/python-social-auth,cjltsod/python-social-auth,DhiaEddineSaidi/python-social-auth,MSOpenTech/python-social-auth,merutak/python-social-auth,mchdks/python-social-auth,tkajtoch/python-social-auth,rsalmaso/python-social-auth,hsr-ba-fs15-dat/python-social-auth,degs098/python-social-auth,ByteInternet/python-social-auth,jeyraof/python-social-auth,degs098/python-social-auth,hsr-ba-fs15-dat/python-social-auth,frankier/python-social-auth,yprez/python-social-auth,SeanHayes/python-social-auth,alrusdi/python-social-auth,fearlessspider/python-social-auth,drxos/python-social-auth,joelstanner/python-social-auth,imsparsh/python-social-auth,python-social-auth/social-app-django,JJediny/python-social-auth,jameslittle/python-social-auth,contracode/python-social-auth,wildtetris/python-social-auth,michael-borisov/python-social-auth,hsr-ba-fs15-dat/python-social-auth,imsparsh/python-social-auth,webjunkie/python-social-auth,contracode/python-social-auth,nirmalvp/python-social-auth,chandolia/python-social-auth,duoduo369/python-social-auth,falcon1kr/python-social-auth,SeanHayes/python-social-auth,clef/python-social-auth,mrwags/python-social-auth,imsparsh/python-social-auth,joelstanner/python-social-auth,falcon1kr/python-social-auth,mathspace/python-social-auth,robbiet480/python-social-auth,ariestiyansyah/python-social-auth,iruga090/python-social-auth,mark-adams/python-social-auth,chandolia/python-social-auth,mrwags/python-social-auth,cmichal/python-social-auth,lneoe/python-social-auth,python-social-auth/social-app-django,garrett-schlesinger/python-social-auth,ByteInternet/python-social-auth,JJediny/python-social-auth,JerzySpendel/python-social-auth,cmichal/python-social-auth,drxos/python-social-auth,yprez/python-social-auth,fearlessspider/python-social-auth,contracode/python-social-auth,jneves/python-social-auth,MSOpenTech/python-social-auth,nirmalvp/python-social-auth,tkajtoch/python-social-auth,python-social-auth/social-app-cherrypy,mark-adams/python-social-auth,mathspace/python-social-auth,muhammad-ammar/python-social-auth,barseghyanartur/python-social-auth,lawrence34/python-social-auth,S01780/python-social-auth,msampathkumar/python-social-auth,Andygmb/python-social-auth,python-social-auth/social-app-django,noodle-learns-programming/python-social-auth,mark-adams/python-social-auth,merutak/python-social-auth,san-mate/python-social-auth,daniula/python-social-auth,bjorand/python-social-auth,ononeor12/python-social-auth,daniula/python-social-auth,clef/python-social-auth,drxos/python-social-auth,robbiet480/python-social-auth,jeyraof/python-social-auth,falcon1kr/python-social-auth,lamby/python-social-auth,MSOpenTech/python-social-auth,firstjob/python-social-auth,VishvajitP/python-social-auth,VishvajitP/python-social-auth,chandolia/python-social-auth,rsteca/python-social-auth,Andygmb/python-social-auth,VishvajitP/python-social-auth,python-social-auth/social-core,JerzySpendel/python-social-auth,barseghyanartur/python-social-auth,webjunkie/python-social-auth,DhiaEddineSaidi/python-social-auth,wildtetris/python-social-auth,lamby/python-social-auth,firstjob/python-social-auth,msampathkumar/python-social-auth,clef/python-social-auth,lneoe/python-social-auth,michael-borisov/python-social-auth,muhammad-ammar/python-social-auth,jameslittle/python-social-auth,bjorand/python-social-auth,daniula/python-social-auth,firstjob/python-social-auth,tobias47n9e/social-core,rsteca/python-social-auth,lawrence34/python-social-auth,michael-borisov/python-social-auth,DhiaEddineSaidi/python-social-auth,nirmalvp/python-social-auth,cmichal/python-social-auth,duoduo369/python-social-auth,noodle-learns-programming/python-social-auth,merutak/python-social-auth,iruga090/python-social-auth,ononeor12/python-social-auth,msampathkumar/python-social-auth,noodle-learns-programming/python-social-auth,python-social-auth/social-storage-sqlalchemy,python-social-auth/social-docs,fearlessspider/python-social-auth,mchdks/python-social-auth,mchdks/python-social-auth,ariestiyansyah/python-social-auth,jneves/python-social-auth,alrusdi/python-social-auth,garrett-schlesinger/python-social-auth,muhammad-ammar/python-social-auth,imsparsh/python-social-auth,san-mate/python-social-auth,wildtetris/python-social-auth,JerzySpendel/python-social-auth
ocial-auth,lamby/python-social-auth,MSOpenTech/python-social-auth,firstjob/python-social-auth,VishvajitP/python-social-auth,VishvajitP/python-social-auth,chandolia/python-social-auth,rsteca/python-social-auth,Andygmb/python-social-auth,VishvajitP/python-social-auth,python-social-auth/social-core,JerzySpendel/python-social-auth,barseghyanartur/python-social-auth,webjunkie/python-social-auth,DhiaEddineSaidi/python-social-auth,wildtetris/python-social-auth,lamby/python-social-auth,firstjob/python-social-auth,msampathkumar/python-social-auth,clef/python-social-auth,lneoe/python-social-auth,michael-borisov/python-social-auth,muhammad-ammar/python-social-auth,jameslittle/python-social-auth,bjorand/python-social-auth,daniula/python-social-auth,firstjob/python-social-auth,tobias47n9e/social-core,rsteca/python-social-auth,lawrence34/python-social-auth,michael-borisov/python-social-auth,DhiaEddineSaidi/python-social-auth,nirmalvp/python-social-auth,cmichal/python-social-auth,duoduo369/python-social-auth,noodle-learns-programming/python-social-auth,merutak/python-social-auth,iruga090/python-social-auth,ononeor12/python-social-auth,msampathkumar/python-social-auth,noodle-learns-programming/python-social-auth,python-social-auth/social-storage-sqlalchemy,python-social-auth/social-docs,fearlessspider/python-social-auth,mchdks/python-social-auth,mchdks/python-social-auth,ariestiyansyah/python-social-auth,jneves/python-social-auth,alrusdi/python-social-auth,garrett-schlesinger/python-social-auth,muhammad-ammar/python-social-auth,imsparsh/python-social-auth,san-mate/python-social-auth,wildtetris/python-social-auth,JerzySpendel/python-social-auth
|
from flask import g, Blueprint
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=lambda strat, user: login_user(user),
user=g.user, *args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
Support remember flag when calling login on flask app
|
from flask import g, Blueprint, request
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=do_login, user=g.user,
*args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
def do_login(strategy, user):
return login_user(user, remember=request.cookies.get('remember') or
request.args.get('remember') or
request.form.get('remember') or False)
|
<commit_before>from flask import g, Blueprint
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=lambda strat, user: login_user(user),
user=g.user, *args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
<commit_msg>Support remember flag when calling login on flask app<commit_after>
|
from flask import g, Blueprint, request
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=do_login, user=g.user,
*args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
def do_login(strategy, user):
return login_user(user, remember=request.cookies.get('remember') or
request.args.get('remember') or
request.form.get('remember') or False)
|
from flask import g, Blueprint
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=lambda strat, user: login_user(user),
user=g.user, *args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
Support remember flag when calling login on flask appfrom flask import g, Blueprint, request
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=do_login, user=g.user,
*args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
def do_login(strategy, user):
return login_user(user, remember=request.cookies.get('remember') or
request.args.get('remember') or
request.form.get('remember') or False)
|
<commit_before>from flask import g, Blueprint
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=lambda strat, user: login_user(user),
user=g.user, *args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
<commit_msg>Support remember flag when calling login on flask app<commit_after>from flask import g, Blueprint, request
from flask.ext.login import login_required, login_user
from social.actions import do_auth, do_complete, do_disconnect
from social.apps.flask_app.utils import strategy
social_auth = Blueprint('social', __name__)
@social_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def auth(backend):
return do_auth(g.strategy)
@social_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@strategy('social.complete')
def complete(backend, *args, **kwargs):
"""Authentication complete view, override this view if transaction
management doesn't suit your needs."""
return do_complete(g.strategy, login=do_login, user=g.user,
*args, **kwargs)
@social_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@social_auth.route('/disconnect/<string:backend>/<int:association_id>/',
methods=('POST',))
@login_required
@strategy()
def disconnect(backend, association_id=None):
"""Disconnects given backend from current logged in user."""
return do_disconnect(g.strategy, g.user, association_id)
def do_login(strategy, user):
return login_user(user, remember=request.cookies.get('remember') or
request.args.get('remember') or
request.form.get('remember') or False)
|
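For context, a minimal sketch of how an application might wire this blueprint together with Flask-Login so that do_login takes effect; the secret key and the user lookup are placeholders, and the setup follows the usual Flask-Login pattern rather than anything stated in this record:

from flask import Flask
from flask.ext.login import LoginManager

app = Flask(__name__)
app.secret_key = 'change-me'  # placeholder; sessions need a real secret

login_manager = LoginManager()
login_manager.init_app(app)

@login_manager.user_loader
def load_user(user_id):
    return get_user_by_id(user_id)  # placeholder, app-specific lookup

from social.apps.flask_app.routes import social_auth
app.register_blueprint(social_auth)

One caveat of do_login as written: the remember values pulled from cookies, query string, and form arrive as strings, so any non-empty value, including 'false', enables the persistent session.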
50b2dccd24c756f5f09e6b9f4e2442abeed7f5c7
|
st2api/tests/unit/controllers/v1/test_alias_execution_rbac.py
|
st2api/tests/unit/controllers/v1/test_alias_execution_rbac.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2common.services import action as action_service
from st2tests.fixturesloader import FixturesLoader
from tests.base import APIControllerWithRBACTestCase
from tests.unit.controllers.v1.test_alias_execution import DummyActionExecution
FIXTURES_PACK = 'aliases'
TEST_MODELS = {
'aliases': ['alias1.yaml', 'alias2.yaml'],
'actions': ['action1.yaml'],
'runners': ['runner1.yaml']
}
TEST_LOAD_MODELS = {
'aliases': ['alias3.yaml']
}
__all__ = [
'AliasExecutionWithRBACTestCase'
]
class AliasExecutionWithRBACTestCase(APIControllerWithRBACTestCase):
def setUp(self):
super(AliasExecutionWithRBACTestCase, self).setUp()
self.models = FixturesLoader().save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
self.alias1 = self.models['aliases']['alias1.yaml']
self.alias2 = self.models['aliases']['alias2.yaml']
@mock.patch.object(action_service, 'request',
return_value=(None, DummyActionExecution(id_=1)))
def test_live_action_context_user_is_set_to_authenticated_user(self, request):
# Verify that the user inside the context of live action is set to authenticated user
# which hit the endpoint. This is important for RBAC and many other things.
user_db = self.users['admin']
self.use_user(user_db)
command = 'Lorem ipsum value1 dolor sit "value2, value3" amet.'
post_resp = self._do_post(alias_execution=self.alias2, command=command)
self.assertEqual(post_resp.status_int, 200)
live_action_db = request.call_args[0][0]
self.assertEquals(live_action_db.context['user'], 'admin')
def _do_post(self, alias_execution, command, expect_errors=False):
execution = {'name': alias_execution.name,
'format': alias_execution.formats[0],
'command': command,
'user': 'stanley',
'source_channel': 'test',
'notification_route': 'test'}
return self.app.post_json('/v1/aliasexecution', execution,
expect_errors=expect_errors)
|
Add a regression test case for alias execution and live action context user.
|
Add a regression test case for alias execution and live action context user.
Verify that the user inside the context is correctly set to the authenticated
user which triggered the alias execution.
|
Python
|
apache-2.0
|
Plexxi/st2,StackStorm/st2,lakshmi-kannan/st2,nzlosh/st2,pixelrebel/st2,dennybaa/st2,pixelrebel/st2,pixelrebel/st2,peak6/st2,StackStorm/st2,peak6/st2,punalpatel/st2,dennybaa/st2,lakshmi-kannan/st2,armab/st2,emedvedev/st2,tonybaloney/st2,dennybaa/st2,tonybaloney/st2,armab/st2,lakshmi-kannan/st2,punalpatel/st2,peak6/st2,Plexxi/st2,nzlosh/st2,armab/st2,nzlosh/st2,emedvedev/st2,Plexxi/st2,StackStorm/st2,punalpatel/st2,nzlosh/st2,emedvedev/st2,Plexxi/st2,tonybaloney/st2,StackStorm/st2
|
Add a regression test case for alias execution and live action context user.
Verify that the user inside the context is correctly set to the authenticated
user which triggered the alias execution.
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2common.services import action as action_service
from st2tests.fixturesloader import FixturesLoader
from tests.base import APIControllerWithRBACTestCase
from tests.unit.controllers.v1.test_alias_execution import DummyActionExecution
FIXTURES_PACK = 'aliases'
TEST_MODELS = {
'aliases': ['alias1.yaml', 'alias2.yaml'],
'actions': ['action1.yaml'],
'runners': ['runner1.yaml']
}
TEST_LOAD_MODELS = {
'aliases': ['alias3.yaml']
}
__all__ = [
'AliasExecutionWithRBACTestCase'
]
class AliasExecutionWithRBACTestCase(APIControllerWithRBACTestCase):
def setUp(self):
super(AliasExecutionWithRBACTestCase, self).setUp()
self.models = FixturesLoader().save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
self.alias1 = self.models['aliases']['alias1.yaml']
self.alias2 = self.models['aliases']['alias2.yaml']
@mock.patch.object(action_service, 'request',
return_value=(None, DummyActionExecution(id_=1)))
def test_live_action_context_user_is_set_to_authenticated_user(self, request):
# Verify that the user inside the context of live action is set to authenticated user
# which hit the endpoint. This is important for RBAC and many other things.
user_db = self.users['admin']
self.use_user(user_db)
command = 'Lorem ipsum value1 dolor sit "value2, value3" amet.'
post_resp = self._do_post(alias_execution=self.alias2, command=command)
self.assertEqual(post_resp.status_int, 200)
live_action_db = request.call_args[0][0]
self.assertEquals(live_action_db.context['user'], 'admin')
def _do_post(self, alias_execution, command, expect_errors=False):
execution = {'name': alias_execution.name,
'format': alias_execution.formats[0],
'command': command,
'user': 'stanley',
'source_channel': 'test',
'notification_route': 'test'}
return self.app.post_json('/v1/aliasexecution', execution,
expect_errors=expect_errors)
|
<commit_before><commit_msg>Add a regression test case for alias execution and live action context user.
Verify that the user inside the context is correctly set to the authenticated
user which triggered the alias execution.<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2common.services import action as action_service
from st2tests.fixturesloader import FixturesLoader
from tests.base import APIControllerWithRBACTestCase
from tests.unit.controllers.v1.test_alias_execution import DummyActionExecution
FIXTURES_PACK = 'aliases'
TEST_MODELS = {
'aliases': ['alias1.yaml', 'alias2.yaml'],
'actions': ['action1.yaml'],
'runners': ['runner1.yaml']
}
TEST_LOAD_MODELS = {
'aliases': ['alias3.yaml']
}
__all__ = [
'AliasExecutionWithRBACTestCase'
]
class AliasExecutionWithRBACTestCase(APIControllerWithRBACTestCase):
def setUp(self):
super(AliasExecutionWithRBACTestCase, self).setUp()
self.models = FixturesLoader().save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
self.alias1 = self.models['aliases']['alias1.yaml']
self.alias2 = self.models['aliases']['alias2.yaml']
@mock.patch.object(action_service, 'request',
return_value=(None, DummyActionExecution(id_=1)))
def test_live_action_context_user_is_set_to_authenticated_user(self, request):
# Verify that the user inside the context of live action is set to authenticated user
# which hit the endpoint. This is important for RBAC and many other things.
user_db = self.users['admin']
self.use_user(user_db)
command = 'Lorem ipsum value1 dolor sit "value2, value3" amet.'
post_resp = self._do_post(alias_execution=self.alias2, command=command)
self.assertEqual(post_resp.status_int, 200)
live_action_db = request.call_args[0][0]
self.assertEquals(live_action_db.context['user'], 'admin')
def _do_post(self, alias_execution, command, expect_errors=False):
execution = {'name': alias_execution.name,
'format': alias_execution.formats[0],
'command': command,
'user': 'stanley',
'source_channel': 'test',
'notification_route': 'test'}
return self.app.post_json('/v1/aliasexecution', execution,
expect_errors=expect_errors)
|
Add a regression test case for alias execution and live action context user.
Verify that the user inside the context is correctly set to the authenticated
user which triggered the alias execution.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2common.services import action as action_service
from st2tests.fixturesloader import FixturesLoader
from tests.base import APIControllerWithRBACTestCase
from tests.unit.controllers.v1.test_alias_execution import DummyActionExecution
FIXTURES_PACK = 'aliases'
TEST_MODELS = {
'aliases': ['alias1.yaml', 'alias2.yaml'],
'actions': ['action1.yaml'],
'runners': ['runner1.yaml']
}
TEST_LOAD_MODELS = {
'aliases': ['alias3.yaml']
}
__all__ = [
'AliasExecutionWithRBACTestCase'
]
class AliasExecutionWithRBACTestCase(APIControllerWithRBACTestCase):
def setUp(self):
super(AliasExecutionWithRBACTestCase, self).setUp()
self.models = FixturesLoader().save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
self.alias1 = self.models['aliases']['alias1.yaml']
self.alias2 = self.models['aliases']['alias2.yaml']
@mock.patch.object(action_service, 'request',
return_value=(None, DummyActionExecution(id_=1)))
def test_live_action_context_user_is_set_to_authenticated_user(self, request):
# Verify that the user inside the context of live action is set to authenticated user
# which hit the endpoint. This is important for RBAC and many other things.
user_db = self.users['admin']
self.use_user(user_db)
command = 'Lorem ipsum value1 dolor sit "value2, value3" amet.'
post_resp = self._do_post(alias_execution=self.alias2, command=command)
self.assertEqual(post_resp.status_int, 200)
live_action_db = request.call_args[0][0]
self.assertEquals(live_action_db.context['user'], 'admin')
def _do_post(self, alias_execution, command, expect_errors=False):
execution = {'name': alias_execution.name,
'format': alias_execution.formats[0],
'command': command,
'user': 'stanley',
'source_channel': 'test',
'notification_route': 'test'}
return self.app.post_json('/v1/aliasexecution', execution,
expect_errors=expect_errors)
|
<commit_before><commit_msg>Add a regression test case for alias execution and live action context user.
Verify that the user inside the context is correctly set to the authenticated
user which triggered the alias execution.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2common.services import action as action_service
from st2tests.fixturesloader import FixturesLoader
from tests.base import APIControllerWithRBACTestCase
from tests.unit.controllers.v1.test_alias_execution import DummyActionExecution
FIXTURES_PACK = 'aliases'
TEST_MODELS = {
'aliases': ['alias1.yaml', 'alias2.yaml'],
'actions': ['action1.yaml'],
'runners': ['runner1.yaml']
}
TEST_LOAD_MODELS = {
'aliases': ['alias3.yaml']
}
__all__ = [
'AliasExecutionWithRBACTestCase'
]
class AliasExecutionWithRBACTestCase(APIControllerWithRBACTestCase):
def setUp(self):
super(AliasExecutionWithRBACTestCase, self).setUp()
self.models = FixturesLoader().save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
self.alias1 = self.models['aliases']['alias1.yaml']
self.alias2 = self.models['aliases']['alias2.yaml']
@mock.patch.object(action_service, 'request',
return_value=(None, DummyActionExecution(id_=1)))
def test_live_action_context_user_is_set_to_authenticated_user(self, request):
# Verify that the user inside the context of live action is set to authenticated user
# which hit the endpoint. This is important for RBAC and many other things.
user_db = self.users['admin']
self.use_user(user_db)
command = 'Lorem ipsum value1 dolor sit "value2, value3" amet.'
post_resp = self._do_post(alias_execution=self.alias2, command=command)
self.assertEqual(post_resp.status_int, 200)
live_action_db = request.call_args[0][0]
self.assertEquals(live_action_db.context['user'], 'admin')
def _do_post(self, alias_execution, command, expect_errors=False):
execution = {'name': alias_execution.name,
'format': alias_execution.formats[0],
'command': command,
'user': 'stanley',
'source_channel': 'test',
'notification_route': 'test'}
return self.app.post_json('/v1/aliasexecution', execution,
expect_errors=expect_errors)
|
|
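The assertion style in the test above, patching action_service.request and then reading request.call_args, is a general mock idiom: call_args holds an (args, kwargs) pair for the most recent call, so call_args[0][0] is the first positional argument. A small self-contained illustration, unrelated to the st2 fixtures:

import mock

svc = mock.Mock()
svc.request('live-action-db', priority='high')

args, kwargs = svc.request.call_args
assert args[0] == 'live-action-db'   # same shape as request.call_args[0][0] above
assert kwargs['priority'] == 'high'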
b1083460378166b23bfe379dfb228d4cf5236255
|
app/grandchallenge/profiles/utils.py
|
app/grandchallenge/profiles/utils.py
|
from django.conf import settings
from grandchallenge.subdomains.utils import reverse
def signin_redirect(redirect=None, user=None):
"""
Redirect user after successful sign in.
First looks for a ``requested_redirect``. If not supplied will fall-back to
the user specific account page. If all fails, will fall-back to redirect to
the homepage. Returns a string defining the URI to go next.
:param redirect:
A value normally supplied by ``next`` form field. Gets preference
before the default view which requires the user.
:param user:
A ``User`` object specifying the user who has just signed in.
:return: String containing the URI to redirect to.
"""
if redirect and settings.LOGOUT_URL not in redirect:
return redirect
elif user is not None:
return reverse("profile_redirect")
else:
return reverse("home")
|
Create method for redirect after signin
|
Create method for redirect after signin
|
Python
|
apache-2.0
|
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
|
Create method for redirect after signin
|
from django.conf import settings
from grandchallenge.subdomains.utils import reverse
def signin_redirect(redirect=None, user=None):
"""
Redirect user after successful sign in.
First looks for a ``requested_redirect``. If not supplied will fall-back to
the user specific account page. If all fails, will fall-back to redirect to
the homepage. Returns a string defining the URI to go next.
:param redirect:
A value normally supplied by ``next`` form field. Gets preference
before the default view which requires the user.
:param user:
A ``User`` object specifying the user who has just signed in.
:return: String containing the URI to redirect to.
"""
if redirect and settings.LOGOUT_URL not in redirect:
return redirect
elif user is not None:
return reverse("profile_redirect")
else:
return reverse("home")
|
<commit_before><commit_msg>Create method for redirect after signin<commit_after>
|
from django.conf import settings
from grandchallenge.subdomains.utils import reverse
def signin_redirect(redirect=None, user=None):
"""
Redirect user after successful sign in.
First looks for a ``requested_redirect``. If not supplied will fall-back to
the user specific account page. If all fails, will fall-back to redirect to
the homepage. Returns a string defining the URI to go next.
:param redirect:
A value normally supplied by ``next`` form field. Gets preference
before the default view which requires the user.
:param user:
A ``User`` object specifying the user who has just signed in.
:return: String containing the URI to redirect to.
"""
if redirect and settings.LOGOUT_URL not in redirect:
return redirect
elif user is not None:
return reverse("profile_redirect")
else:
return reverse("home")
|
Create method for redirect after signinfrom django.conf import settings
from grandchallenge.subdomains.utils import reverse
def signin_redirect(redirect=None, user=None):
"""
Redirect user after successful sign in.
First looks for a ``requested_redirect``. If not supplied will fall-back to
the user specific account page. If all fails, will fall-back to redirect to
the homepage. Returns a string defining the URI to go next.
:param redirect:
A value normally supplied by ``next`` form field. Gets preference
before the default view which requires the user.
:param user:
A ``User`` object specifying the user who has just signed in.
:return: String containing the URI to redirect to.
"""
if redirect and settings.LOGOUT_URL not in redirect:
return redirect
elif user is not None:
return reverse("profile_redirect")
else:
return reverse("home")
|
<commit_before><commit_msg>Create method for redirect after signin<commit_after>from django.conf import settings
from grandchallenge.subdomains.utils import reverse
def signin_redirect(redirect=None, user=None):
"""
Redirect user after successful sign in.
First looks for a ``requested_redirect``. If not supplied will fall-back to
the user specific account page. If all fails, will fall-back to redirect to
the homepage. Returns a string defining the URI to go next.
:param redirect:
A value normally supplied by ``next`` form field. Gets preference
before the default view which requires the user.
:param user:
A ``User`` object specifying the user who has just signed in.
:return: String containing the URI to redirect to.
"""
if redirect and settings.LOGOUT_URL not in redirect:
return redirect
elif user is not None:
return reverse("profile_redirect")
else:
return reverse("home")
|
|
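The branch order in signin_redirect is easiest to see in isolation. A standalone sketch of the same logic with reverse and the logout URL stubbed as plain strings; the URL values are placeholders, not the project's real settings:

LOGOUT_URL = '/accounts/logout/'      # stand-in for settings.LOGOUT_URL

def reverse(name):                    # stand-in for grandchallenge's reverse
    return '/' + name + '/'

def signin_redirect(redirect=None, user=None):
    if redirect and LOGOUT_URL not in redirect:
        return redirect
    elif user is not None:
        return reverse('profile_redirect')
    return reverse('home')

assert signin_redirect('/challenges/', user='alice') == '/challenges/'
assert signin_redirect('/accounts/logout/?next=/', user='alice') == '/profile_redirect/'
assert signin_redirect(user='alice') == '/profile_redirect/'
assert signin_redirect() == '/home/'

Note that the guard is a substring check, so any requested redirect that merely contains the logout path is ignored, presumably to avoid bouncing a freshly signed-in user straight back to logout.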
64f820b18190034a2285e996cd2f66fc100c2f54
|
cogbot/extensions/jira.py
|
cogbot/extensions/jira.py
|
import re
import urllib.parse
from discord.ext import commands
from discord.ext.commands import Context
from cogbot.cog_bot import CogBot
class Jira:
REPORT_PATTERN = re.compile('^(mc-)?(\d+)$', re.IGNORECASE)
def __init__(self, bot: CogBot, ext: str):
self.bot = bot
@commands.command(pass_context=True)
async def jira(self, ctx: Context, *, query: str):
rmatch = self.REPORT_PATTERN.match(query)
if rmatch:
rgroups = rmatch.groups()
report_no = rgroups[1]
url = 'https://bugs.mojang.com/browse/MC-' + report_no
else:
search_url = urllib.parse.urlencode({'searchString': query})
url = 'https://bugs.mojang.com/secure/QuickSearch.jspa?' + search_url
await self.bot.say(url)
await self.bot.react_success(ctx)
def setup(bot):
bot.add_cog(Jira(bot, __name__))
|
Add extension to search JIRA for bug reports
|
Add extension to search JIRA for bug reports
|
Python
|
mit
|
Arcensoth/cogbot
|
Add extension to search JIRA for bug reports
|
import re
import urllib.parse
from discord.ext import commands
from discord.ext.commands import Context
from cogbot.cog_bot import CogBot
class Jira:
REPORT_PATTERN = re.compile('^(mc-)?(\d+)$', re.IGNORECASE)
def __init__(self, bot: CogBot, ext: str):
self.bot = bot
@commands.command(pass_context=True)
async def jira(self, ctx: Context, *, query: str):
rmatch = self.REPORT_PATTERN.match(query)
if rmatch:
rgroups = rmatch.groups()
report_no = rgroups[1]
url = 'https://bugs.mojang.com/browse/MC-' + report_no
else:
search_url = urllib.parse.urlencode({'searchString': query})
url = 'https://bugs.mojang.com/secure/QuickSearch.jspa?' + search_url
await self.bot.say(url)
await self.bot.react_success(ctx)
def setup(bot):
bot.add_cog(Jira(bot, __name__))
|
<commit_before><commit_msg>Add extension to search JIRA for bug reports<commit_after>
|
import re
import urllib.parse
from discord.ext import commands
from discord.ext.commands import Context
from cogbot.cog_bot import CogBot
class Jira:
REPORT_PATTERN = re.compile('^(mc-)?(\d+)$', re.IGNORECASE)
def __init__(self, bot: CogBot, ext: str):
self.bot = bot
@commands.command(pass_context=True)
async def jira(self, ctx: Context, *, query: str):
rmatch = self.REPORT_PATTERN.match(query)
if rmatch:
rgroups = rmatch.groups()
report_no = rgroups[1]
url = 'https://bugs.mojang.com/browse/MC-' + report_no
else:
search_url = urllib.parse.urlencode({'searchString': query})
url = 'https://bugs.mojang.com/secure/QuickSearch.jspa?' + search_url
await self.bot.say(url)
await self.bot.react_success(ctx)
def setup(bot):
bot.add_cog(Jira(bot, __name__))
|
Add extension to search JIRA for bug reportsimport re
import urllib.parse
from discord.ext import commands
from discord.ext.commands import Context
from cogbot.cog_bot import CogBot
class Jira:
REPORT_PATTERN = re.compile('^(mc-)?(\d+)$', re.IGNORECASE)
def __init__(self, bot: CogBot, ext: str):
self.bot = bot
@commands.command(pass_context=True)
async def jira(self, ctx: Context, *, query: str):
rmatch = self.REPORT_PATTERN.match(query)
if rmatch:
rgroups = rmatch.groups()
report_no = rgroups[1]
url = 'https://bugs.mojang.com/browse/MC-' + report_no
else:
search_url = urllib.parse.urlencode({'searchString': query})
url = 'https://bugs.mojang.com/secure/QuickSearch.jspa?' + search_url
await self.bot.say(url)
await self.bot.react_success(ctx)
def setup(bot):
bot.add_cog(Jira(bot, __name__))
|
<commit_before><commit_msg>Add extension to search JIRA for bug reports<commit_after>import re
import urllib.parse
from discord.ext import commands
from discord.ext.commands import Context
from cogbot.cog_bot import CogBot
class Jira:
REPORT_PATTERN = re.compile('^(mc-)?(\d+)$', re.IGNORECASE)
def __init__(self, bot: CogBot, ext: str):
self.bot = bot
@commands.command(pass_context=True)
async def jira(self, ctx: Context, *, query: str):
rmatch = self.REPORT_PATTERN.match(query)
if rmatch:
rgroups = rmatch.groups()
report_no = rgroups[1]
url = 'https://bugs.mojang.com/browse/MC-' + report_no
else:
search_url = urllib.parse.urlencode({'searchString': query})
url = 'https://bugs.mojang.com/secure/QuickSearch.jspa?' + search_url
await self.bot.say(url)
await self.bot.react_success(ctx)
def setup(bot):
bot.add_cog(Jira(bot, __name__))
|
|
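The routing in the jira command hinges entirely on REPORT_PATTERN. A quick standalone check of which queries resolve to a direct report URL and which fall through to quick search; the sample queries are made up:

import re
import urllib.parse

REPORT_PATTERN = re.compile(r'^(mc-)?(\d+)$', re.IGNORECASE)

for query in ('MC-4', 'mc-88899', '12345', 'water physics'):
    m = REPORT_PATTERN.match(query)
    if m:
        print('https://bugs.mojang.com/browse/MC-' + m.group(2))
    else:
        qs = urllib.parse.urlencode({'searchString': query})
        print('https://bugs.mojang.com/secure/QuickSearch.jspa?' + qs)

Using a raw string for the pattern, as here, also avoids the invalid-escape warning that the unprefixed '\d' in the original raises on newer Pythons.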
201e63722b102ee453610a955e3014d3c772a8fa
|
ceph_deploy/tests/parser/test_purge.py
|
ceph_deploy/tests/parser/test_purge.py
|
import pytest
from ceph_deploy.cli import get_parser
class TestParserPurge(object):
def setup(self):
self.parser = get_parser()
def test_purge_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy purge' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_purge_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_purge_one_host(self):
args = self.parser.parse_args('purge host1'.split())
assert args.host == ['host1']
def test_purge_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['purge'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
|
Add tests for argparse purge
|
[RM-11742] Add tests for argparse purge
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>
|
Python
|
mit
|
osynge/ceph-deploy,codenrhoden/ceph-deploy,Vicente-Cheng/ceph-deploy,SUSE/ceph-deploy,branto1/ceph-deploy,imzhulei/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,ceph/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,Vicente-Cheng/ceph-deploy,isyippee/ceph-deploy,zhouyuan/ceph-deploy,ceph/ceph-deploy,zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,isyippee/ceph-deploy,codenrhoden/ceph-deploy,imzhulei/ceph-deploy,SUSE/ceph-deploy,shenhequnying/ceph-deploy,branto1/ceph-deploy,ghxandsky/ceph-deploy,ghxandsky/ceph-deploy,trhoden/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy
|
[RM-11742] Add tests for argparse purge
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>
|
import pytest
from ceph_deploy.cli import get_parser
class TestParserPurge(object):
def setup(self):
self.parser = get_parser()
def test_purge_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy purge' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_purge_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_purge_one_host(self):
args = self.parser.parse_args('purge host1'.split())
assert args.host == ['host1']
def test_purge_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['purge'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
|
<commit_before><commit_msg>[RM-11742] Add tests for argparse purge
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com><commit_after>
|
import pytest
from ceph_deploy.cli import get_parser
class TestParserPurge(object):
def setup(self):
self.parser = get_parser()
def test_purge_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy purge' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_purge_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_purge_one_host(self):
args = self.parser.parse_args('purge host1'.split())
assert args.host == ['host1']
def test_purge_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['purge'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
|
[RM-11742] Add tests for argparse purge
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>import pytest
from ceph_deploy.cli import get_parser
class TestParserPurge(object):
def setup(self):
self.parser = get_parser()
def test_purge_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy purge' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_purge_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_purge_one_host(self):
args = self.parser.parse_args('purge host1'.split())
assert args.host == ['host1']
def test_purge_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['purge'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
|
<commit_before><commit_msg>[RM-11742] Add tests for argparse purge
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com><commit_after>import pytest
from ceph_deploy.cli import get_parser
class TestParserPurge(object):
def setup(self):
self.parser = get_parser()
def test_purge_help(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge --help'.split())
out, err = capsys.readouterr()
assert 'usage: ceph-deploy purge' in out
assert 'positional arguments:' in out
assert 'optional arguments:' in out
def test_purge_host_required(self, capsys):
with pytest.raises(SystemExit):
self.parser.parse_args('purge'.split())
out, err = capsys.readouterr()
assert "error: too few arguments" in err
def test_purge_one_host(self):
args = self.parser.parse_args('purge host1'.split())
assert args.host == ['host1']
def test_purge_multiple_hosts(self):
hostnames = ['host1', 'host2', 'host3']
args = self.parser.parse_args(['purge'] + hostnames)
assert frozenset(args.host) == frozenset(hostnames)
|
|
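The tests above pin down the purge subcommand's contract without showing the parser itself. A minimal argparse sketch that would satisfy them; this is an illustration, not ceph-deploy's actual get_parser:

import argparse

def get_parser():
    parser = argparse.ArgumentParser(prog='ceph-deploy')
    sub = parser.add_subparsers(dest='command')
    purge = sub.add_parser('purge', help='remove Ceph packages and data')
    purge.add_argument('host', nargs='+', help='hosts to purge')
    return parser

args = get_parser().parse_args(['purge', 'host1', 'host2'])
assert args.host == ['host1', 'host2']

One era detail: the 'error: too few arguments' message asserted in the tests is Python 2 argparse wording; Python 3 reports 'the following arguments are required' instead.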
fbbd0a3f55c1e79ebf6ae7b872697611740edb24
|
foobar.py
|
foobar.py
|
# -*- coding: utf-8 -*-
class TreeElement(object):
def __repr__(self):
return u'{}'.format(self.__class__.__name__)
class Token(TreeElement):
def __init__(self, value):
if isinstance(value, TreeElement):
raise TypeError
self._value = value
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._value)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._value > other._value
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._value >= other._value
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._value == other._value
def __hash__(self):
return hash((self.__class__.__name__, self._value))
def tokens(self):
yield self._value
class Not(TreeElement):
def __init__(self, child):
if not isinstance(child, TreeElement):
raise TypeError
self._child = child
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._child)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._child > other._child
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._child >= other._child
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._child == other._child
def __hash__(self):
return hash((self.__class__.__name__, self._child))
def tokens(self):
for token in self._child.tokens():
yield token
class GroupMixin(object):
def __init__(self, a, b, *others):
children = (a, b) + others
for child in children:
if not isinstance(child, TreeElement):
raise TypeError
self._children = tuple(sorted(children))
def __repr__(self):
return u'{}({})'.format(
self.__class__.__name__,
u', '.join(repr(child) for child in self._children)
)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._children > other._children
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._children >= other._children
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._children == other._children
def __hash__(self):
return hash(tuple([self.__class__.__name__]) + self._children)
def tokens(self):
for child in self._children:
for token in child.tokens():
yield token
class And(GroupMixin, TreeElement):
pass
class Or(GroupMixin, TreeElement):
pass
|
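The sorted-children invariant is what makes structurally equal trees compare and hash equal regardless of argument order; a short usage sketch (assuming the module is importable as foobar):

from foobar import Token, Not, And, Or

# Children are sorted at construction time, so argument order is irrelevant.
a = And(Token('x'), Not(Token('y')))
b = And(Not(Token('y')), Token('x'))
assert a == b and hash(a) == hash(b)

# tokens() walks the tree and yields the leaf values.
expr = Or(Token('x'), And(Token('y'), Token('z')))
print(sorted(expr.tokens()))  # ['x', 'y', 'z']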
Implement Token and Not, And, Or classes
|
Implement Token and Not, And, Or classes
|
Python
|
mit
|
hackebrot/i-am-bool
|
Implement Token and Not, And, Or classes
|
# -*- coding: utf-8 -*-
class TreeElement(object):
def __repr__(self):
return u'{}'.format(self.__class__.__name__)
class Token(TreeElement):
def __init__(self, value):
if isinstance(value, TreeElement):
raise TypeError
self._value = value
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._value)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._value > other._value
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._value >= other._value
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._value == other._value
def __hash__(self):
return hash((self.__class__.__name__, self._value))
def tokens(self):
yield self._value
class Not(TreeElement):
def __init__(self, child):
if not isinstance(child, TreeElement):
raise TypeError
self._child = child
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._child)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._child > other._child
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._child >= other._child
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._child == other._child
def __hash__(self):
return hash((self.__class__.__name__, self._child))
def tokens(self):
for token in self._child.tokens():
yield token
class GroupMixin(object):
def __init__(self, a, b, *others):
children = (a, b) + others
for child in children:
if not isinstance(child, TreeElement):
raise TypeError
self._children = tuple(sorted(children))
def __repr__(self):
return u'{}({})'.format(
self.__class__.__name__,
u', '.join(repr(child) for child in self._children)
)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._children > other._children
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._children >= other._children
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._children == other._children
def __hash__(self):
return hash(tuple([self.__class__.__name__]) + self._children)
def tokens(self):
for child in self._children:
for token in child.tokens():
yield token
class And(GroupMixin, TreeElement):
pass
class Or(GroupMixin, TreeElement):
pass
|
<commit_before><commit_msg>Implement Token and Not, And, Or classes<commit_after>
|
# -*- coding: utf-8 -*-
class TreeElement(object):
def __repr__(self):
return u'{}'.format(self.__class__.__name__)
class Token(TreeElement):
def __init__(self, value):
if isinstance(value, TreeElement):
raise TypeError
self._value = value
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._value)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._value > other._value
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._value >= other._value
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._value == other._value
def __hash__(self):
return hash((self.__class__.__name__, self._value))
def tokens(self):
yield self._value
class Not(TreeElement):
def __init__(self, child):
if not isinstance(child, TreeElement):
raise TypeError
self._child = child
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._child)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._child > other._child
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._child >= other._child
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._child == other._child
def __hash__(self):
return hash((self.__class__.__name__, self._child))
def tokens(self):
for token in self._child.tokens():
yield token
class GroupMixin(object):
def __init__(self, a, b, *others):
children = (a, b) + others
for child in children:
if not isinstance(child, TreeElement):
raise TypeError
self._children = tuple(sorted(children))
def __repr__(self):
return u'{}({})'.format(
self.__class__.__name__,
u', '.join(repr(child) for child in self._children)
)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._children > other._children
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._children >= other._children
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._children == other._children
def __hash__(self):
return hash(tuple([self.__class__.__name__]) + self._children)
def tokens(self):
for child in self._children:
for token in child.tokens():
yield token
class And(GroupMixin, TreeElement):
pass
class Or(GroupMixin, TreeElement):
pass
|
Implement Token and Not, And, Or classes# -*- coding: utf-8 -*-
class TreeElement(object):
def __repr__(self):
return u'{}'.format(self.__class__.__name__)
class Token(TreeElement):
def __init__(self, value):
if isinstance(value, TreeElement):
raise TypeError
self._value = value
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._value)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._value > other._value
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._value >= other._value
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._value == other._value
def __hash__(self):
return hash((self.__class__.__name__, self._value))
def tokens(self):
yield self._value
class Not(TreeElement):
def __init__(self, child):
if not isinstance(child, TreeElement):
raise TypeError
self._child = child
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._child)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._child > other._child
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._child >= other._child
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._child == other._child
def __hash__(self):
return hash((self.__class__.__name__, self._child))
def tokens(self):
for token in self._child.tokens():
yield token
class GroupMixin(object):
def __init__(self, a, b, *others):
children = (a, b) + others
for child in children:
if not isinstance(child, TreeElement):
raise TypeError
self._children = tuple(sorted(children))
def __repr__(self):
return u'{}({})'.format(
self.__class__.__name__,
u', '.join(repr(child) for child in self._children)
)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._children > other._children
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._children >= other._children
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._children == other._children
def __hash__(self):
return hash(tuple([self.__class__.__name__]) + self._children)
def tokens(self):
for child in self._children:
for token in child.tokens():
yield token
class And(GroupMixin, TreeElement):
pass
class Or(GroupMixin, TreeElement):
pass
|
<commit_before><commit_msg>Implement Token and Not, And, Or classes<commit_after># -*- coding: utf-8 -*-
class TreeElement(object):
def __repr__(self):
return u'{}'.format(self.__class__.__name__)
class Token(TreeElement):
def __init__(self, value):
if isinstance(value, TreeElement):
raise TypeError
self._value = value
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._value)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._value > other._value
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._value >= other._value
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._value == other._value
def __hash__(self):
return hash((self.__class__.__name__, self._value))
def tokens(self):
yield self._value
class Not(TreeElement):
def __init__(self, child):
if not isinstance(child, TreeElement):
raise TypeError
self._child = child
def __repr__(self):
return u'{}({})'.format(self.__class__.__name__, self._child)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._child > other._child
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._child >= other._child
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._child == other._child
def __hash__(self):
return hash((self.__class__.__name__, self._child))
def tokens(self):
for token in self._child.tokens():
yield token
class GroupMixin(object):
def __init__(self, a, b, *others):
children = (a, b) + others
for child in children:
if not isinstance(child, TreeElement):
raise TypeError
self._children = tuple(sorted(children))
def __repr__(self):
return u'{}({})'.format(
self.__class__.__name__,
u', '.join(repr(child) for child in self._children)
)
def __gt__(self, other):
if not isinstance(other, self.__class__):
return hash(self) > hash(other)
return self._children > other._children
def __ge__(self, other):
if not isinstance(other, self.__class__):
return hash(self) >= hash(other)
return self._children >= other._children
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self._children == other._children
def __hash__(self):
return hash(tuple([self.__class__.__name__]) + self._children)
def tokens(self):
for child in self._children:
for token in child.tokens():
yield token
class And(GroupMixin, TreeElement):
pass
class Or(GroupMixin, TreeElement):
pass
|
|
cf4dc3f65049c23f4a20140cf5092f3c5a771a6a
|
administrator/tests/models/test_user.py
|
administrator/tests/models/test_user.py
|
from django.test import TestCase
from administrator.models import User, UserManager
class UserManagerTestCase(TestCase):
valid_email = 'example@example.com'
valid_username = 'foobar'
valid_password = 'qwerty123'
def setUp(self):
self.user_manager = UserManager()
self.user_manager.model = User
def test_user_is_not_created_without_email(self):
with self.assertRaises(ValueError):
self.user_manager.create_user('', '', '')
def test_user_is_created(self):
user = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(user.is_superuser, False)
def test_superuser_is_not_created_with_is_superuser_field_set_to_false(self):
with self.assertRaises(ValueError):
self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password,
is_superuser=False)
def test_superuser_is_created(self):
        superuser = self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(superuser.is_superuser, True)
class UserTestCase(TestCase):
valid_email = 'example@example.com'
def setUp(self):
self.user = User()
self.user.email = self.valid_email
def test_user_status_can_be_set_to_active(self):
self.user.set_status_and_is_active(0)
self.assertEqual(self.user.status, 0)
self.assertEqual(self.user.is_active, True)
def test_user_status_can_be_set_to_inactive(self):
self.user.set_status_and_is_active(1)
self.assertEqual(self.user.status, 1)
self.assertEqual(self.user.is_active, False)
def test_user_has_email_as_his_short_name(self):
short_name = self.user.get_short_name()
self.assertEqual(short_name, self.user.email)
|
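The tests imply a small surface on the model itself. One plausible shape, written only for illustration (field names are inferred from the assertions above, not taken from the project's actual models.py):

from django.contrib.auth.models import AbstractBaseUser
from django.db import models

class User(AbstractBaseUser):
    # Hypothetical fields inferred from the tests.
    email = models.EmailField(unique=True)
    status = models.IntegerField(default=0)
    is_active = models.BooleanField(default=True)

    USERNAME_FIELD = 'email'

    def set_status_and_is_active(self, status):
        # Status 0 marks the account active; any other value deactivates it.
        self.status = status
        self.is_active = (status == 0)

    def get_short_name(self):
        # The tests expect the email address as the short name.
        return self.email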
Add tests for User model
|
Add tests for User model
|
Python
|
mit
|
Social-projects-Rivne/Rv-025.Python,Social-projects-Rivne/Rv-025.Python,Social-projects-Rivne/Rv-025.Python
|
Add tests for User model
|
from django.test import TestCase
from administrator.models import User, UserManager
class UserManagerTestCase(TestCase):
valid_email = 'example@example.com'
valid_username = 'foobar'
valid_password = 'qwerty123'
def setUp(self):
self.user_manager = UserManager()
self.user_manager.model = User
def test_user_is_not_created_without_email(self):
with self.assertRaises(ValueError):
self.user_manager.create_user('', '', '')
def test_user_is_created(self):
user = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(user.is_superuser, False)
def test_superuser_is_not_created_with_is_superuser_field_set_to_false(self):
with self.assertRaises(ValueError):
self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password,
is_superuser=False)
def test_superuser_is_created(self):
        superuser = self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(superuser.is_superuser, True)
class UserTestCase(TestCase):
valid_email = 'example@example.com'
def setUp(self):
self.user = User()
self.user.email = self.valid_email
def test_user_status_can_be_set_to_active(self):
self.user.set_status_and_is_active(0)
self.assertEqual(self.user.status, 0)
self.assertEqual(self.user.is_active, True)
def test_user_status_can_be_set_to_inactive(self):
self.user.set_status_and_is_active(1)
self.assertEqual(self.user.status, 1)
self.assertEqual(self.user.is_active, False)
def test_user_has_email_as_his_short_name(self):
short_name = self.user.get_short_name()
self.assertEqual(short_name, self.user.email)
|
<commit_before><commit_msg>Add tests for User model<commit_after>
|
from django.test import TestCase
from administrator.models import User, UserManager
class UserManagerTestCase(TestCase):
valid_email = 'example@example.com'
valid_username = 'foobar'
valid_password = 'qwerty123'
def setUp(self):
self.user_manager = UserManager()
self.user_manager.model = User
def test_user_is_not_created_without_email(self):
with self.assertRaises(ValueError):
self.user_manager.create_user('', '', '')
def test_user_is_created(self):
user = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(user.is_superuser, False)
def test_superuser_is_not_created_with_is_superuser_field_set_to_false(self):
with self.assertRaises(ValueError):
self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password,
is_superuser=False)
def test_superuser_is_created(self):
        superuser = self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(superuser.is_superuser, True)
class UserTestCase(TestCase):
valid_email = 'example@example.com'
def setUp(self):
self.user = User()
self.user.email = self.valid_email
def test_user_status_can_be_set_to_active(self):
self.user.set_status_and_is_active(0)
self.assertEqual(self.user.status, 0)
self.assertEqual(self.user.is_active, True)
def test_user_status_can_be_set_to_inactive(self):
self.user.set_status_and_is_active(1)
self.assertEqual(self.user.status, 1)
self.assertEqual(self.user.is_active, False)
def test_user_has_email_as_his_short_name(self):
short_name = self.user.get_short_name()
self.assertEqual(short_name, self.user.email)
|
Add tests for User modelfrom django.test import TestCase
from administrator.models import User, UserManager
class UserManagerTestCase(TestCase):
valid_email = 'example@example.com'
valid_username = 'foobar'
valid_password = 'qwerty123'
def setUp(self):
self.user_manager = UserManager()
self.user_manager.model = User
def test_user_is_not_created_without_email(self):
with self.assertRaises(ValueError):
self.user_manager.create_user('', '', '')
def test_user_is_created(self):
user = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(user.is_superuser, False)
def test_superuser_is_not_created_with_is_superuser_field_set_to_false(self):
with self.assertRaises(ValueError):
self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password,
is_superuser=False)
def test_superuser_is_created(self):
        superuser = self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(superuser.is_superuser, True)
class UserTestCase(TestCase):
valid_email = 'example@example.com'
def setUp(self):
self.user = User()
self.user.email = self.valid_email
def test_user_status_can_be_set_to_active(self):
self.user.set_status_and_is_active(0)
self.assertEqual(self.user.status, 0)
self.assertEqual(self.user.is_active, True)
def test_user_status_can_be_set_to_inactive(self):
self.user.set_status_and_is_active(1)
self.assertEqual(self.user.status, 1)
self.assertEqual(self.user.is_active, False)
def test_user_has_email_as_his_short_name(self):
short_name = self.user.get_short_name()
self.assertEqual(short_name, self.user.email)
|
<commit_before><commit_msg>Add tests for User model<commit_after>from django.test import TestCase
from administrator.models import User, UserManager
class UserManagerTestCase(TestCase):
valid_email = 'example@example.com'
valid_username = 'foobar'
valid_password = 'qwerty123'
def setUp(self):
self.user_manager = UserManager()
self.user_manager.model = User
def test_user_is_not_created_without_email(self):
with self.assertRaises(ValueError):
self.user_manager.create_user('', '', '')
def test_user_is_created(self):
user = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(user.is_superuser, False)
def test_superuser_is_not_created_with_is_superuser_field_set_to_false(self):
with self.assertRaises(ValueError):
self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password,
is_superuser=False)
def test_superuser_is_created(self):
        superuser = self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(superuser.is_superuser, True)
class UserTestCase(TestCase):
valid_email = 'example@example.com'
def setUp(self):
self.user = User()
self.user.email = self.valid_email
def test_user_status_can_be_set_to_active(self):
self.user.set_status_and_is_active(0)
self.assertEqual(self.user.status, 0)
self.assertEqual(self.user.is_active, True)
def test_user_status_can_be_set_to_inactive(self):
self.user.set_status_and_is_active(1)
self.assertEqual(self.user.status, 1)
self.assertEqual(self.user.is_active, False)
def test_user_has_email_as_his_short_name(self):
short_name = self.user.get_short_name()
self.assertEqual(short_name, self.user.email)
|
|
b9a8be97bc445edac024a35d644d5c86419a7f3a
|
ReactAndroid/src/main/third-party/android-support-for-standalone-apps/v7/appcompat/res-unpacker.py
|
ReactAndroid/src/main/third-party/android-support-for-standalone-apps/v7/appcompat/res-unpacker.py
|
import contextlib
import os
import shutil
import sys
import tempfile
import zipfile
# Helper that unpacks the contents of the res folder of an .aar file
# into the given destination.
@contextlib.contextmanager
def cleanup(path):
yield path
shutil.rmtree(path)
if __name__ == '__main__':
with zipfile.ZipFile(sys.argv[1], 'r') as z:
with cleanup(tempfile.mkdtemp()) as temp_path:
z.extractall(temp_path, filter(lambda n: n.startswith('res/'), z.namelist()))
shutil.move(os.path.join(temp_path, 'res'), sys.argv[2])
|
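The script is driven entirely by positional arguments: the .aar to read and the destination for its res/ tree, e.g. python res-unpacker.py appcompat-v7.aar build/unpacked-res (file names illustrative). Note that cleanup() removes the temporary directory only on the success path; a variant that also cleans up when extraction fails (a sketch, not the committed code) would wrap the yield:

@contextlib.contextmanager
def cleanup(path):
    try:
        yield path
    finally:
        # Runs whether or not the body raised, so the temp dir never leaks.
        shutil.rmtree(path)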
Build code depending on resources from the appcompat library with both Buck and Gradle
|
Build code depending on resources from the appcompat library with both Buck and Gradle
Reviewed By: sdwilsh
Differential Revision: D2844961
fb-gh-sync-id: 686a9f253eb370a9dc8cc33ca1c4e8453f89210a
|
Python
|
bsd-3-clause
|
hammerandchisel/react-native,jaggs6/react-native,ultralame/react-native,alin23/react-native,yamill/react-native,doochik/react-native,iodine/react-native,Swaagie/react-native,facebook/react-native,exponentjs/react-native,corbt/react-native,makadaw/react-native,arthuralee/react-native,jevakallio/react-native,forcedotcom/react-native,sghiassy/react-native,peterp/react-native,hammerandchisel/react-native,negativetwelve/react-native,gre/react-native,mrspeaker/react-native,a2/react-native,charlesvinette/react-native,aljs/react-native,foghina/react-native,Swaagie/react-native,clozr/react-native,mrspeaker/react-native,hoangpham95/react-native,nathanajah/react-native,chirag04/react-native,shrutic/react-native,xiayz/react-native,miracle2k/react-native,dabit3/react-native,jasonnoahchoi/react-native,ultralame/react-native,farazs/react-native,shinate/react-native,ultralame/react-native,chnfeeeeeef/react-native,aljs/react-native,yamill/react-native,apprennet/react-native,dabit3/react-native,ndejesus1227/react-native,CntChen/react-native,skatpgusskat/react-native,jasonnoahchoi/react-native,tsjing/react-native,BretJohnson/react-native,CntChen/react-native,salanki/react-native,jasonnoahchoi/react-native,urvashi01/react-native,cdlewis/react-native,tadeuzagallo/react-native,formatlos/react-native,formatlos/react-native,BretJohnson/react-native,Bhullnatik/react-native,brentvatne/react-native,exponent/react-native,makadaw/react-native,aljs/react-native,esauter5/react-native,exponent/react-native,compulim/react-native,mrspeaker/react-native,Maxwell2022/react-native,ptomasroos/react-native,compulim/react-native,exponent/react-native,PlexChat/react-native,negativetwelve/react-native,imjerrybao/react-native,hoastoolshop/react-native,hayeah/react-native,imjerrybao/react-native,charlesvinette/react-native,brentvatne/react-native,naoufal/react-native,gilesvangruisen/react-native,callstack-io/react-native,christopherdro/react-native,kesha-antonov/react-native,imDangerous/react-native,CntChen/react-native,skatpgusskat/react-native,miracle2k/react-native,ptomasroos/react-native,ankitsinghania94/react-native,Maxwell2022/react-native,shrutic/react-native,doochik/react-native,hoastoolshop/react-native,alin23/react-native,tgoldenberg/react-native,Andreyco/react-native,chnfeeeeeef/react-native,jhen0409/react-native,skevy/react-native,Swaagie/react-native,formatlos/react-native,browniefed/react-native,Emilios1995/react-native,MattFoley/react-native,christopherdro/react-native,Guardiannw/react-native,thotegowda/react-native,jevakallio/react-native,mironiasty/react-native,clozr/react-native,Guardiannw/react-native,CodeLinkIO/react-native,cpunion/react-native,browniefed/react-native,facebook/react-native,forcedotcom/react-native,rebeccahughes/react-native,Purii/react-native,arbesfeld/react-native,htc2u/react-native,hammerandchisel/react-native,Guardiannw/react-native,Andreyco/react-native,ankitsinghania94/react-native,negativetwelve/react-native,corbt/react-native,mironiasty/react-native,imjerrybao/react-native,facebook/react-native,myntra/react-native,shrutic/react-native,happypancake/react-native,ankitsinghania94/react-native,tarkus/react-native-appletv,ptmt/react-native-macos,rebeccahughes/react-native,InterfaceInc/react-native,Ehesp/react-native,exponent/react-native,skevy/react-native,a2/react-native,iodine/react-native,dubert/react-native,adamjmcgrath/react-native,Swaagie/react-native,peterp/react-native,apprennet/react-native,naoufal/react-native,tgoldenberg/react-native,farazs/react-native,gitim/react-native,lelandrichard
son/react-native,satya164/react-native,jadbox/react-native,rebeccahughes/react-native,thotegowda/react-native,myntra/react-native,DerayGa/react-native,shrutic123/react-native,hoastoolshop/react-native,jaggs6/react-native,Purii/react-native,sghiassy/react-native,philikon/react-native,nathanajah/react-native,tszajna0/react-native,formatlos/react-native,imDangerous/react-native,Guardiannw/react-native,janicduplessis/react-native,aaron-goshine/react-native,gre/react-native,martinbigio/react-native,ndejesus1227/react-native,rickbeerendonk/react-native,tgoldenberg/react-native,Livyli/react-native,skevy/react-native,jadbox/react-native,chnfeeeeeef/react-native,CntChen/react-native,gre/react-native,kesha-antonov/react-native,Guardiannw/react-native,arthuralee/react-native,callstack-io/react-native,wesley1001/react-native,shrutic/react-native,arthuralee/react-native,aljs/react-native,gitim/react-native,doochik/react-native,chnfeeeeeef/react-native,CodeLinkIO/react-native,a2/react-native,corbt/react-native,ptmt/react-native-macos,jhen0409/react-native,esauter5/react-native,a2/react-native,doochik/react-native,Andreyco/react-native,ptmt/react-native-macos,jadbox/react-native,iodine/react-native,Purii/react-native,charlesvinette/react-native,CodeLinkIO/react-native,shrutic123/react-native,DannyvanderJagt/react-native,dikaiosune/react-native,jadbox/react-native,eduardinni/react-native,mrspeaker/react-native,dubert/react-native,jevakallio/react-native,ankitsinghania94/react-native,yamill/react-native,hoangpham95/react-native,skatpgusskat/react-native,clozr/react-native,apprennet/react-native,luqin/react-native,gre/react-native,tsjing/react-native,thotegowda/react-native,tgoldenberg/react-native,skatpgusskat/react-native,javache/react-native,shrutic/react-native,hayeah/react-native,kesha-antonov/react-native,thotegowda/react-native,ptomasroos/react-native,eduardinni/react-native,tarkus/react-native-appletv,pglotov/react-native,yamill/react-native,ptomasroos/react-native,machard/react-native,miracle2k/react-native,eduardinni/react-native,InterfaceInc/react-native,shinate/react-native,esauter5/react-native,shrutic123/react-native,happypancake/react-native,corbt/react-native,thotegowda/react-native,DanielMSchmidt/react-native,DanielMSchmidt/react-native,shinate/react-native,Livyli/react-native,chirag04/react-native,skevy/react-native,rebeccahughes/react-native,janicduplessis/react-native,peterp/react-native,chirag04/react-native,dikaiosune/react-native,jeffchienzabinet/react-native,machard/react-native,wesley1001/react-native,Livyli/react-native,ndejesus1227/react-native,InterfaceInc/react-native,jasonnoahchoi/react-native,adamjmcgrath/react-native,wenpkpk/react-native,tadeuzagallo/react-native,eduardinni/react-native,javache/react-native,csatf/react-native,jadbox/react-native,BretJohnson/react-native,janicduplessis/react-native,imDangerous/react-native,urvashi01/react-native,catalinmiron/react-native,negativetwelve/react-native,javache/react-native,martinbigio/react-native,yamill/react-native,brentvatne/react-native,catalinmiron/react-native,htc2u/react-native,corbt/react-native,alin23/react-native,tadeuzagallo/react-native,chirag04/react-native,browniefed/react-native,philikon/react-native,iodine/react-native,foghina/react-native,Livyli/react-native,christopherdro/react-native,orenklein/react-native,mironiasty/react-native,htc2u/react-native,apprennet/react-native,hoastoolshop/react-native,DerayGa/react-native,Ehesp/react-native,dubert/react-native,rickbeerendonk/react-native,gre/react-native,dubert/react-
native,foghina/react-native,ultralame/react-native,luqin/react-native,DerayGa/react-native,dikaiosune/react-native,Tredsite/react-native,orenklein/react-native,martinbigio/react-native,luqin/react-native,forcedotcom/react-native,ptomasroos/react-native,eduardinni/react-native,ndejesus1227/react-native,DerayGa/react-native,orenklein/react-native,dabit3/react-native,Tredsite/react-native,tgoldenberg/react-native,htc2u/react-native,facebook/react-native,cosmith/react-native,philikon/react-native,hoangpham95/react-native,mrspeaker/react-native,tsjing/react-native,farazs/react-native,Emilios1995/react-native,foghina/react-native,clozr/react-native,ndejesus1227/react-native,ptomasroos/react-native,tarkus/react-native-appletv,cosmith/react-native,gre/react-native,kesha-antonov/react-native,esauter5/react-native,browniefed/react-native,myntra/react-native,tsjing/react-native,aaron-goshine/react-native,xiayz/react-native,satya164/react-native,imDangerous/react-native,Tredsite/react-native,charlesvinette/react-native,PlexChat/react-native,martinbigio/react-native,hayeah/react-native,tadeuzagallo/react-native,pglotov/react-native,javache/react-native,sghiassy/react-native,exponentjs/react-native,adamjmcgrath/react-native,chirag04/react-native,urvashi01/react-native,xiayz/react-native,ptmt/react-native-macos,lprhodes/react-native,gilesvangruisen/react-native,corbt/react-native,csatf/react-native,PlexChat/react-native,csatf/react-native,MattFoley/react-native,gilesvangruisen/react-native,formatlos/react-native,jaggs6/react-native,frantic/react-native,Emilios1995/react-native,philikon/react-native,shrutic123/react-native,peterp/react-native,jaggs6/react-native,cpunion/react-native,Emilios1995/react-native,frantic/react-native,browniefed/react-native,makadaw/react-native,exponent/react-native,Purii/react-native,cosmith/react-native,nickhudkins/react-native,satya164/react-native,dabit3/react-native,alin23/react-native,chirag04/react-native,hammerandchisel/react-native,myntra/react-native,Ehesp/react-native,cpunion/react-native,Tredsite/react-native,shinate/react-native,doochik/react-native,nickhudkins/react-native,urvashi01/react-native,brentvatne/react-native,tsjing/react-native,jevakallio/react-native,jevakallio/react-native,makadaw/react-native,Maxwell2022/react-native,Purii/react-native,myntra/react-native,foghina/react-native,DannyvanderJagt/react-native,gilesvangruisen/react-native,arbesfeld/react-native,Guardiannw/react-native,aaron-goshine/react-native,dabit3/react-native,exponentjs/react-native,DannyvanderJagt/react-native,eduardinni/react-native,aljs/react-native,jhen0409/react-native,tadeuzagallo/react-native,Tredsite/react-native,doochik/react-native,jeffchienzabinet/react-native,dikaiosune/react-native,charlesvinette/react-native,lprhodes/react-native,skatpgusskat/react-native,imjerrybao/react-native,jaggs6/react-native,pandiaraj44/react-native,wesley1001/react-native,ultralame/react-native,cosmith/react-native,happypancake/react-native,exponent/react-native,satya164/react-native,exponent/react-native,forcedotcom/react-native,corbt/react-native,jhen0409/react-native,InterfaceInc/react-native,DannyvanderJagt/react-native,yamill/react-native,farazs/react-native,machard/react-native,gitim/react-native,shinate/react-native,myntra/react-native,salanki/react-native,skatpgusskat/react-native,compulim/react-native,tszajna0/react-native,iodine/react-native,hoastoolshop/react-native,MattFoley/react-native,browniefed/react-native,DerayGa/react-native,rickbeerendonk/react-native,xiayz/react-native,mrspeak
er/react-native,yamill/react-native,lprhodes/react-native,imjerrybao/react-native,christopherdro/react-native,DanielMSchmidt/react-native,happypancake/react-native,tszajna0/react-native,dikaiosune/react-native,dubert/react-native,lprhodes/react-native,sghiassy/react-native,wenpkpk/react-native,farazs/react-native,sghiassy/react-native,cosmith/react-native,urvashi01/react-native,thotegowda/react-native,Purii/react-native,hoastoolshop/react-native,apprennet/react-native,exponentjs/react-native,imjerrybao/react-native,jasonnoahchoi/react-native,rebeccahughes/react-native,happypancake/react-native,MattFoley/react-native,mironiasty/react-native,naoufal/react-native,imDangerous/react-native,cosmith/react-native,wenpkpk/react-native,PlexChat/react-native,csatf/react-native,a2/react-native,CntChen/react-native,Ehesp/react-native,tszajna0/react-native,a2/react-native,ptmt/react-native-macos,gre/react-native,vjeux/react-native,iodine/react-native,BretJohnson/react-native,kesha-antonov/react-native,aljs/react-native,dubert/react-native,orenklein/react-native,chnfeeeeeef/react-native,luqin/react-native,tadeuzagallo/react-native,jeffchienzabinet/react-native,christopherdro/react-native,forcedotcom/react-native,janicduplessis/react-native,nickhudkins/react-native,hoangpham95/react-native,skatpgusskat/react-native,gilesvangruisen/react-native,CntChen/react-native,catalinmiron/react-native,iodine/react-native,BretJohnson/react-native,makadaw/react-native,Andreyco/react-native,philikon/react-native,ndejesus1227/react-native,machard/react-native,christopherdro/react-native,luqin/react-native,cosmith/react-native,Bhullnatik/react-native,gitim/react-native,shrutic/react-native,philikon/react-native,cpunion/react-native,Andreyco/react-native,Guardiannw/react-native,nathanajah/react-native,pandiaraj44/react-native,PlexChat/react-native,wesley1001/react-native,farazs/react-native,janicduplessis/react-native,Ehesp/react-native,shinate/react-native,jasonnoahchoi/react-native,aaron-goshine/react-native,gre/react-native,Swaagie/react-native,facebook/react-native,yamill/react-native,machard/react-native,orenklein/react-native,martinbigio/react-native,lelandrichardson/react-native,foghina/react-native,satya164/react-native,forcedotcom/react-native,christopherdro/react-native,negativetwelve/react-native,formatlos/react-native,wesley1001/react-native,hoangpham95/react-native,pandiaraj44/react-native,negativetwelve/react-native,tszajna0/react-native,peterp/react-native,urvashi01/react-native,shrutic/react-native,nathanajah/react-native,tarkus/react-native-appletv,javache/react-native,wesley1001/react-native,satya164/react-native,negativetwelve/react-native,CntChen/react-native,nathanajah/react-native,charlesvinette/react-native,facebook/react-native,Emilios1995/react-native,DanielMSchmidt/react-native,catalinmiron/react-native,exponentjs/react-native,Swaagie/react-native,DannyvanderJagt/react-native,Livyli/react-native,hammerandchisel/react-native,xiayz/react-native,nickhudkins/react-native,adamjmcgrath/react-native,tszajna0/react-native,ankitsinghania94/react-native,janicduplessis/react-native,callstack-io/react-native,MattFoley/react-native,salanki/react-native,kesha-antonov/react-native,foghina/react-native,csatf/react-native,cosmith/react-native,adamjmcgrath/react-native,dikaiosune/react-native,christopherdro/react-native,Purii/react-native,Bhullnatik/react-native,naoufal/react-native,rickbeerendonk/react-native,gitim/react-native,lelandrichardson/react-native,javache/react-native,lelandrichardson/react-native,gilesva
ngruisen/react-native,urvashi01/react-native,Swaagie/react-native,esauter5/react-native,javache/react-native,rickbeerendonk/react-native,DanielMSchmidt/react-native,facebook/react-native,nickhudkins/react-native,hoangpham95/react-native,DannyvanderJagt/react-native,aaron-goshine/react-native,Livyli/react-native,jhen0409/react-native,ptmt/react-native-macos,jeffchienzabinet/react-native,rickbeerendonk/react-native,martinbigio/react-native,naoufal/react-native,frantic/react-native,catalinmiron/react-native,hayeah/react-native,adamjmcgrath/react-native,Livyli/react-native,machard/react-native,kesha-antonov/react-native,ptmt/react-native-macos,clozr/react-native,tgoldenberg/react-native,rickbeerendonk/react-native,philikon/react-native,myntra/react-native,Bhullnatik/react-native,Bhullnatik/react-native,wenpkpk/react-native,thotegowda/react-native,brentvatne/react-native,happypancake/react-native,lprhodes/react-native,javache/react-native,peterp/react-native,jadbox/react-native,pglotov/react-native,skevy/react-native,callstack-io/react-native,vjeux/react-native,xiayz/react-native,salanki/react-native,shrutic123/react-native,tarkus/react-native-appletv,htc2u/react-native,jaggs6/react-native,mironiasty/react-native,pandiaraj44/react-native,skatpgusskat/react-native,jadbox/react-native,InterfaceInc/react-native,lprhodes/react-native,Tredsite/react-native,exponentjs/react-native,gilesvangruisen/react-native,ankitsinghania94/react-native,arbesfeld/react-native,callstack-io/react-native,formatlos/react-native,chnfeeeeeef/react-native,a2/react-native,nickhudkins/react-native,machard/react-native,doochik/react-native,Tredsite/react-native,miracle2k/react-native,miracle2k/react-native,makadaw/react-native,gitim/react-native,farazs/react-native,mrspeaker/react-native,ptomasroos/react-native,MattFoley/react-native,corbt/react-native,naoufal/react-native,compulim/react-native,myntra/react-native,tarkus/react-native-appletv,negativetwelve/react-native,Ehesp/react-native,facebook/react-native,dubert/react-native,frantic/react-native,dikaiosune/react-native,adamjmcgrath/react-native,vjeux/react-native,orenklein/react-native,Bhullnatik/react-native,facebook/react-native,CodeLinkIO/react-native,tarkus/react-native-appletv,MattFoley/react-native,Emilios1995/react-native,forcedotcom/react-native,dabit3/react-native,dikaiosune/react-native,orenklein/react-native,forcedotcom/react-native,DerayGa/react-native,cdlewis/react-native,arbesfeld/react-native,ptomasroos/react-native,hoastoolshop/react-native,Guardiannw/react-native,htc2u/react-native,Ehesp/react-native,vjeux/react-native,nathanajah/react-native,tadeuzagallo/react-native,cdlewis/react-native,wesley1001/react-native,alin23/react-native,imDangerous/react-native,cdlewis/react-native,arbesfeld/react-native,a2/react-native,pglotov/react-native,wenpkpk/react-native,jaggs6/react-native,satya164/react-native,gilesvangruisen/react-native,jhen0409/react-native,happypancake/react-native,janicduplessis/react-native,pglotov/react-native,DerayGa/react-native,formatlos/react-native,CodeLinkIO/react-native,makadaw/react-native,tadeuzagallo/react-native,adamjmcgrath/react-native,aljs/react-native,doochik/react-native,Livyli/react-native,peterp/react-native,lelandrichardson/react-native,hammerandchisel/react-native,brentvatne/react-native,ndejesus1227/react-native,PlexChat/react-native,InterfaceInc/react-native,skevy/react-native,luqin/react-native,jasonnoahchoi/react-native,callstack-io/react-native,mironiasty/react-native,brentvatne/react-native,cpunion/react-native,csatf/r
eact-native,salanki/react-native,imjerrybao/react-native,martinbigio/react-native,pandiaraj44/react-native,tszajna0/react-native,apprennet/react-native,jadbox/react-native,naoufal/react-native,xiayz/react-native,eduardinni/react-native,csatf/react-native,salanki/react-native,farazs/react-native,nathanajah/react-native,CodeLinkIO/react-native,vjeux/react-native,hammerandchisel/react-native,callstack-io/react-native,brentvatne/react-native,cdlewis/react-native,lelandrichardson/react-native,dubert/react-native,foghina/react-native,DanielMSchmidt/react-native,pglotov/react-native,alin23/react-native,catalinmiron/react-native,aaron-goshine/react-native,hoangpham95/react-native,jeffchienzabinet/react-native,jeffchienzabinet/react-native,aljs/react-native,machard/react-native,frantic/react-native,DannyvanderJagt/react-native,browniefed/react-native,catalinmiron/react-native,shrutic/react-native,cdlewis/react-native,mrspeaker/react-native,skevy/react-native,clozr/react-native,urvashi01/react-native,wenpkpk/react-native,Maxwell2022/react-native,tgoldenberg/react-native,chnfeeeeeef/react-native,charlesvinette/react-native,exponentjs/react-native,miracle2k/react-native,Bhullnatik/react-native,chnfeeeeeef/react-native,jaggs6/react-native,exponentjs/react-native,myntra/react-native,farazs/react-native,jevakallio/react-native,cpunion/react-native,MattFoley/react-native,rickbeerendonk/react-native,satya164/react-native,ndejesus1227/react-native,Bhullnatik/react-native,wesley1001/react-native,PlexChat/react-native,makadaw/react-native,esauter5/react-native,Andreyco/react-native,tszajna0/react-native,htc2u/react-native,ankitsinghania94/react-native,jeffchienzabinet/react-native,tarkus/react-native-appletv,Maxwell2022/react-native,cpunion/react-native,mironiasty/react-native,Tredsite/react-native,martinbigio/react-native,cdlewis/react-native,charlesvinette/react-native,BretJohnson/react-native,javache/react-native,shrutic123/react-native,miracle2k/react-native,gitim/react-native,imDangerous/react-native,imjerrybao/react-native,clozr/react-native,doochik/react-native,chirag04/react-native,rickbeerendonk/react-native,luqin/react-native,Ehesp/react-native,dabit3/react-native,negativetwelve/react-native,luqin/react-native,cpunion/react-native,jhen0409/react-native,catalinmiron/react-native,clozr/react-native,sghiassy/react-native,salanki/react-native,nickhudkins/react-native,shrutic123/react-native,arthuralee/react-native,alin23/react-native,shinate/react-native,iodine/react-native,formatlos/react-native,tsjing/react-native,BretJohnson/react-native,DerayGa/react-native,CodeLinkIO/react-native,dabit3/react-native,xiayz/react-native,pandiaraj44/react-native,thotegowda/react-native,peterp/react-native,ptmt/react-native-macos,philikon/react-native,htc2u/react-native,Andreyco/react-native,InterfaceInc/react-native,Maxwell2022/react-native,arbesfeld/react-native,naoufal/react-native,CodeLinkIO/react-native,hoangpham95/react-native,miracle2k/react-native,imDangerous/react-native,jevakallio/react-native,CntChen/react-native,arbesfeld/react-native,Swaagie/react-native,Emilios1995/react-native,esauter5/react-native,lprhodes/react-native,mironiasty/react-native,makadaw/react-native,janicduplessis/react-native,kesha-antonov/react-native,compulim/react-native,aaron-goshine/react-native,apprennet/react-native,Maxwell2022/react-native,chirag04/react-native,apprennet/react-native,csatf/react-native,wenpkpk/react-native,nathanajah/react-native,skevy/react-native,mironiasty/react-native,BretJohnson/react-native,happypancake/react
-native,tsjing/react-native,pandiaraj44/react-native,PlexChat/react-native,arthuralee/react-native,jevakallio/react-native,jasonnoahchoi/react-native,salanki/react-native,jeffchienzabinet/react-native,InterfaceInc/react-native,cdlewis/react-native,jhen0409/react-native,DannyvanderJagt/react-native,DanielMSchmidt/react-native,callstack-io/react-native,exponent/react-native,Emilios1995/react-native,nickhudkins/react-native,tgoldenberg/react-native,pglotov/react-native,wenpkpk/react-native,eduardinni/react-native,Andreyco/react-native,hayeah/react-native,browniefed/react-native,esauter5/react-native,pandiaraj44/react-native,ankitsinghania94/react-native,lprhodes/react-native,DanielMSchmidt/react-native,sghiassy/react-native,lelandrichardson/react-native,sghiassy/react-native,aaron-goshine/react-native,tsjing/react-native,shrutic123/react-native,pglotov/react-native,alin23/react-native,cdlewis/react-native,hoastoolshop/react-native,arbesfeld/react-native,shinate/react-native,jevakallio/react-native,brentvatne/react-native,lelandrichardson/react-native,hammerandchisel/react-native,Maxwell2022/react-native,Purii/react-native,orenklein/react-native,kesha-antonov/react-native,gitim/react-native
|
Build code depending on resources from the appcompat library with both Buck and Gradle
Reviewed By: sdwilsh
Differential Revision: D2844961
fb-gh-sync-id: 686a9f253eb370a9dc8cc33ca1c4e8453f89210a
|
import contextlib
import os
import shutil
import sys
import tempfile
import zipfile
# Helper that unpacks the contents of the res folder of an .aar file
# into the given destination.
@contextlib.contextmanager
def cleanup(path):
yield path
shutil.rmtree(path)
if __name__ == '__main__':
with zipfile.ZipFile(sys.argv[1], 'r') as z:
with cleanup(tempfile.mkdtemp()) as temp_path:
z.extractall(temp_path, filter(lambda n: n.startswith('res/'), z.namelist()))
shutil.move(os.path.join(temp_path, 'res'), sys.argv[2])
|
<commit_before><commit_msg>Build code depending on resources from the appcompat library with both Buck and Gradle
Reviewed By: sdwilsh
Differential Revision: D2844961
fb-gh-sync-id: 686a9f253eb370a9dc8cc33ca1c4e8453f89210a<commit_after>
|
import contextlib
import os
import shutil
import sys
import tempfile
import zipfile
# Helper that unpacks the contents of the res folder of an .aar file
# into the given destination.
@contextlib.contextmanager
def cleanup(path):
yield path
shutil.rmtree(path)
if __name__ == '__main__':
with zipfile.ZipFile(sys.argv[1], 'r') as z:
with cleanup(tempfile.mkdtemp()) as temp_path:
z.extractall(temp_path, filter(lambda n: n.startswith('res/'), z.namelist()))
shutil.move(os.path.join(temp_path, 'res'), sys.argv[2])
|
Build code depending on resources from the appcompat library with both Buck and Gradle
Reviewed By: sdwilsh
Differential Revision: D2844961
fb-gh-sync-id: 686a9f253eb370a9dc8cc33ca1c4e8453f89210aimport contextlib
import os
import shutil
import sys
import tempfile
import zipfile
# Helper that unpacks the contents of the res folder of an .aar file
# into the given destination.
@contextlib.contextmanager
def cleanup(path):
yield path
shutil.rmtree(path)
if __name__ == '__main__':
with zipfile.ZipFile(sys.argv[1], 'r') as z:
with cleanup(tempfile.mkdtemp()) as temp_path:
z.extractall(temp_path, filter(lambda n: n.startswith('res/'), z.namelist()))
shutil.move(os.path.join(temp_path, 'res'), sys.argv[2])
|
<commit_before><commit_msg>Build code depending on resources from the appcompat library with both Buck and Gradle
Reviewed By: sdwilsh
Differential Revision: D2844961
fb-gh-sync-id: 686a9f253eb370a9dc8cc33ca1c4e8453f89210a<commit_after>import contextlib
import os
import shutil
import sys
import tempfile
import zipfile
# Helper that unpacks the contents of the res folder of an .aar file
# into the given destination.
@contextlib.contextmanager
def cleanup(path):
yield path
shutil.rmtree(path)
if __name__ == '__main__':
with zipfile.ZipFile(sys.argv[1], 'r') as z:
with cleanup(tempfile.mkdtemp()) as temp_path:
z.extractall(temp_path, filter(lambda n: n.startswith('res/'), z.namelist()))
shutil.move(os.path.join(temp_path, 'res'), sys.argv[2])
|
|
359726a1085b00936e7e7ee3f876b09f72988921
|
dipy/utils/tests/test_tripwire.py
|
dipy/utils/tests/test_tripwire.py
|
""" Testing tripwire module.
"""
from ..tripwire import TripWire, is_tripwire, TripWireError
from nose import SkipTest
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal)
def test_is_tripwire():
assert_false(is_tripwire(object()))
assert_true(is_tripwire(TripWire('some message')))
def test_tripwire():
# Test tripwire object
silly_module_name = TripWire('We do not have silly_module_name')
assert_raises(TripWireError,
getattr,
silly_module_name,
'do_silly_thing')
    # Check that the error can be caught as an AttributeError too
try:
silly_module_name.__wrapped__
except TripWireError as err:
assert_true(isinstance(err, AttributeError))
else:
raise RuntimeError("No error raised, but expected")
|
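The assertions pin TripWire down quite tightly: any attribute access must raise TripWireError, and TripWireError must be catchable as an AttributeError. A minimal implementation consistent with the tests (a sketch, not dipy's actual tripwire module):

class TripWireError(AttributeError):
    # Subclassing AttributeError is what lets the final test catch the
    # error under either name.
    pass

class TripWire(object):
    def __init__(self, msg):
        self._msg = msg

    def __getattr__(self, name):
        # __getattr__ only fires for attributes not found normally, so
        # reading self._msg here is safe; everything else trips the wire.
        raise TripWireError(self._msg)

def is_tripwire(obj):
    # isinstance inspects type(obj) directly, so it does not trip the wire.
    return isinstance(obj, TripWire)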
Add a test for TripWire.
|
TST: Add a test for TripWire.
|
Python
|
bsd-3-clause
|
StongeEtienne/dipy,villalonreina/dipy,villalonreina/dipy,nilgoyyou/dipy,FrancoisRheaultUS/dipy,matthieudumont/dipy,StongeEtienne/dipy,nilgoyyou/dipy,matthieudumont/dipy,FrancoisRheaultUS/dipy
|
TST: Add a test for TripWire.
|
""" Testing tripwire module.
"""
from ..tripwire import TripWire, is_tripwire, TripWireError
from nose import SkipTest
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal)
def test_is_tripwire():
assert_false(is_tripwire(object()))
assert_true(is_tripwire(TripWire('some message')))
def test_tripwire():
# Test tripwire object
silly_module_name = TripWire('We do not have silly_module_name')
assert_raises(TripWireError,
getattr,
silly_module_name,
'do_silly_thing')
    # Check that the error can be caught as an AttributeError too
try:
silly_module_name.__wrapped__
except TripWireError as err:
assert_true(isinstance(err, AttributeError))
else:
raise RuntimeError("No error raised, but expected")
|
<commit_before><commit_msg>TST: Add a test for TripWire.<commit_after>
|
""" Testing tripwire module.
"""
from ..tripwire import TripWire, is_tripwire, TripWireError
from nose import SkipTest
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal)
def test_is_tripwire():
assert_false(is_tripwire(object()))
assert_true(is_tripwire(TripWire('some message')))
def test_tripwire():
# Test tripwire object
silly_module_name = TripWire('We do not have silly_module_name')
assert_raises(TripWireError,
getattr,
silly_module_name,
'do_silly_thing')
    # Check that the error can be caught as an AttributeError too
try:
silly_module_name.__wrapped__
except TripWireError as err:
assert_true(isinstance(err, AttributeError))
else:
raise RuntimeError("No error raised, but expected")
|
TST: Add a test for TripWire.""" Testing tripwire module.
"""
from ..tripwire import TripWire, is_tripwire, TripWireError
from nose import SkipTest
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal)
def test_is_tripwire():
assert_false(is_tripwire(object()))
assert_true(is_tripwire(TripWire('some message')))
def test_tripwire():
# Test tripwire object
silly_module_name = TripWire('We do not have silly_module_name')
assert_raises(TripWireError,
getattr,
silly_module_name,
'do_silly_thing')
    # Check that the error can be caught as an AttributeError too
try:
silly_module_name.__wrapped__
except TripWireError as err:
assert_true(isinstance(err, AttributeError))
else:
raise RuntimeError("No error raised, but expected")
|
<commit_before><commit_msg>TST: Add a test for TripWire.<commit_after>""" Testing tripwire module.
"""
from ..tripwire import TripWire, is_tripwire, TripWireError
from nose import SkipTest
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal)
def test_is_tripwire():
assert_false(is_tripwire(object()))
assert_true(is_tripwire(TripWire('some message')))
def test_tripwire():
# Test tripwire object
silly_module_name = TripWire('We do not have silly_module_name')
assert_raises(TripWireError,
getattr,
silly_module_name,
'do_silly_thing')
    # Check that the error can be caught as an AttributeError too
try:
silly_module_name.__wrapped__
except TripWireError as err:
assert_true(isinstance(err, AttributeError))
else:
raise RuntimeError("No error raised, but expected")
|
|
1103c498635480d516eeebbec615f8d13db51bb7
|
api/tests/test_topic_api.py
|
api/tests/test_topic_api.py
|
import pytz
from datetime import datetime, timedelta
from django.test import TestCase
from api.factories import TopicFactory
from rest_framework.test import APIRequestFactory, force_authenticate
from api.factories import UserFactory
from api.serializers import TopicSerializer
from api.views.topics import TopicViewSet
class TopicAPITest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.active_view = TopicViewSet.as_view({'get': 'active'})
self.inactive_view = TopicViewSet.as_view({'get': 'inactive'})
self.user = UserFactory()
self.active_topics_count = 5
self.inactive_topics_count = 5
TopicFactory.create_batch(self.active_topics_count, start=datetime.now(pytz.utc) - timedelta(days=3),
end=datetime.now(pytz.utc) + timedelta(days=4))
TopicFactory.create_batch(self.inactive_topics_count, start=datetime.now(pytz.utc) + timedelta(weeks=1),
end=datetime.now(pytz.utc) + timedelta(weeks=2))
def test_active_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/active")
force_authenticate(request, user=self.user)
response = self.active_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.active_topics_count, len(topics))
for topic in topics:
self.assertTrue(topic.is_active())
def test_inactive_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/inactive")
force_authenticate(request, user=self.user)
response = self.inactive_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.inactive_topics_count, len(topics))
for topic in topics:
self.assertFalse(topic.is_active())
|
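The two routes are plain viewset methods bound via as_view in the tests, so no extra routing is needed to exercise them. A sketch of a viewset shape these tests would pass against (the Topic import path and filter fields are assumptions mirroring the test data, not the project's actual views):

from django.utils import timezone
from rest_framework import viewsets
from rest_framework.response import Response

from api.models import Topic  # hypothetical import path
from api.serializers import TopicSerializer

class TopicViewSet(viewsets.GenericViewSet):
    def active(self, request):
        # Topics whose [start, end] window contains the current time.
        now = timezone.now()
        qs = Topic.objects.filter(start__lte=now, end__gte=now)
        return Response(TopicSerializer(qs, many=True).data)

    def inactive(self, request):
        # Everything whose window does not contain the current time.
        now = timezone.now()
        qs = Topic.objects.exclude(start__lte=now, end__gte=now)
        return Response(TopicSerializer(qs, many=True).data)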
Write tests for active and inactive list routes
|
Write tests for active and inactive list routes
|
Python
|
mit
|
frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq
|
Write tests for active and inactive list routes
|
import pytz
from datetime import datetime, timedelta
from django.test import TestCase
from api.factories import TopicFactory
from rest_framework.test import APIRequestFactory, force_authenticate
from api.factories import UserFactory
from api.serializers import TopicSerializer
from api.views.topics import TopicViewSet
class TopicAPITest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.active_view = TopicViewSet.as_view({'get': 'active'})
self.inactive_view = TopicViewSet.as_view({'get': 'inactive'})
self.user = UserFactory()
self.active_topics_count = 5
self.inactive_topics_count = 5
TopicFactory.create_batch(self.active_topics_count, start=datetime.now(pytz.utc) - timedelta(days=3),
end=datetime.now(pytz.utc) + timedelta(days=4))
TopicFactory.create_batch(self.inactive_topics_count, start=datetime.now(pytz.utc) + timedelta(weeks=1),
end=datetime.now(pytz.utc) + timedelta(weeks=2))
def test_active_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/active")
force_authenticate(request, user=self.user)
response = self.active_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.active_topics_count, len(topics))
for topic in topics:
self.assertTrue(topic.is_active())
def test_inactive_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/inactive")
force_authenticate(request, user=self.user)
response = self.inactive_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.inactive_topics_count, len(topics))
for topic in topics:
self.assertFalse(topic.is_active())
|
<commit_before><commit_msg>Write tests for active and inactive list routes<commit_after>
|
import pytz
from datetime import datetime, timedelta
from django.test import TestCase
from api.factories import TopicFactory
from rest_framework.test import APIRequestFactory, force_authenticate
from api.factories import UserFactory
from api.serializers import TopicSerializer
from api.views.topics import TopicViewSet
class TopicAPITest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.active_view = TopicViewSet.as_view({'get': 'active'})
self.inactive_view = TopicViewSet.as_view({'get': 'inactive'})
self.user = UserFactory()
self.active_topics_count = 5
self.inactive_topics_count = 5
TopicFactory.create_batch(self.active_topics_count, start=datetime.now(pytz.utc) - timedelta(days=3),
end=datetime.now(pytz.utc) + timedelta(days=4))
TopicFactory.create_batch(self.inactive_topics_count, start=datetime.now(pytz.utc) + timedelta(weeks=1),
end=datetime.now(pytz.utc) + timedelta(weeks=2))
def test_active_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/active")
force_authenticate(request, user=self.user)
response = self.active_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.active_topics_count, len(topics))
for topic in topics:
self.assertTrue(topic.is_active())
def test_inactive_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/inactive")
force_authenticate(request, user=self.user)
response = self.inactive_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.inactive_topics_count, len(topics))
for topic in topics:
self.assertFalse(topic.is_active())
|
Write tests for active and inactive list routesimport pytz
from datetime import datetime, timedelta
from django.test import TestCase
from api.factories import TopicFactory
from rest_framework.test import APIRequestFactory, force_authenticate
from api.factories import UserFactory
from api.serializers import TopicSerializer
from api.views.topics import TopicViewSet
class TopicAPITest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.active_view = TopicViewSet.as_view({'get': 'active'})
self.inactive_view = TopicViewSet.as_view({'get': 'inactive'})
self.user = UserFactory()
self.active_topics_count = 5
self.inactive_topics_count = 5
TopicFactory.create_batch(self.active_topics_count, start=datetime.now(pytz.utc) - timedelta(days=3),
end=datetime.now(pytz.utc) + timedelta(days=4))
TopicFactory.create_batch(self.inactive_topics_count, start=datetime.now(pytz.utc) + timedelta(weeks=1),
end=datetime.now(pytz.utc) + timedelta(weeks=2))
def test_active_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/active")
force_authenticate(request, user=self.user)
response = self.active_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.active_topics_count, len(topics))
for topic in topics:
self.assertTrue(topic.is_active())
def test_inactive_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/inactive")
force_authenticate(request, user=self.user)
response = self.inactive_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.inactive_topics_count, len(topics))
for topic in topics:
self.assertFalse(topic.is_active())
|
<commit_before><commit_msg>Write tests for active and inactive list routes<commit_after>import pytz
from datetime import datetime, timedelta
from django.test import TestCase
from api.factories import TopicFactory
from rest_framework.test import APIRequestFactory, force_authenticate
from api.factories import UserFactory
from api.serializers import TopicSerializer
from api.views.topics import TopicViewSet
class TopicAPITest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.active_view = TopicViewSet.as_view({'get': 'active'})
self.inactive_view = TopicViewSet.as_view({'get': 'inactive'})
self.user = UserFactory()
self.active_topics_count = 5
self.inactive_topics_count = 5
TopicFactory.create_batch(self.active_topics_count, start=datetime.now(pytz.utc) - timedelta(days=3),
end=datetime.now(pytz.utc) + timedelta(days=4))
TopicFactory.create_batch(self.inactive_topics_count, start=datetime.now(pytz.utc) + timedelta(weeks=1),
end=datetime.now(pytz.utc) + timedelta(weeks=2))
def test_active_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/active")
force_authenticate(request, user=self.user)
response = self.active_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.active_topics_count, len(topics))
for topic in topics:
self.assertTrue(topic.is_active())
def test_inactive_topics_are_returned_correctly(self):
request = self.factory.get("api/topics/inactive")
force_authenticate(request, user=self.user)
response = self.inactive_view(request)
topics = TopicSerializer(data=response.data, many=True)
topics.is_valid(raise_exception=True)
topics = topics.create(topics.validated_data)
self.assertEqual(self.inactive_topics_count, len(topics))
for topic in topics:
self.assertFalse(topic.is_active())
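The tests above call a Topic.is_active() helper that is not shown in this record. A minimal sketch of what such a method could look like, assuming the model stores timezone-aware start and end datetimes as the factory calls suggest (only the method name comes from the source; the body is an assumption):
import pytz
from datetime import datetime

class Topic:  # illustrative stand-in for the Django model
    def __init__(self, start, end):
        self.start = start  # timezone-aware start of the topic window
        self.end = end      # timezone-aware end of the topic window

    def is_active(self):
        # Active means "now" falls inside the [start, end] window,
        # which matches how the test fixtures are constructed.
        now = datetime.now(pytz.utc)
        return self.start <= now <= self.end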
|
|
fb876d0ddc12fa89db5e3bec519ad4e93e914b8d
|
designate/tests/unit/api/test_version.py
|
designate/tests/unit/api/test_version.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_config import fixture as cfg_fixture
import oslotest.base
import webtest
from designate.api import versions
from designate.common import constants
CONF = cfg.CONF
class TestApiVersion(oslotest.base.BaseTestCase):
def setUp(self):
super(TestApiVersion, self).setUp()
self.useFixture(cfg_fixture.Config(CONF))
def test_add_a_version(self):
api_url = 'http://localhost/v2'
results = []
versions._add_a_version(
results, 'v2.1', api_url, constants.EXPERIMENTAL,
'2022-08-10T00:00:00Z')
self.assertEqual(1, len(results))
self.assertEqual('v2.1', results[0]['id'])
self.assertEqual(constants.EXPERIMENTAL, results[0]['status'])
self.assertEqual('2022-08-10T00:00:00Z', results[0]['updated'])
self.assertEqual(2, len(results[0]['links']))
def test_get_versions(self):
CONF.set_override('enable_host_header', False, 'service:api')
CONF.set_override(
'api_base_uri', 'http://127.0.0.2:9001/', 'service:api'
)
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://127.0.0.2:9001/v2',
response.json['versions'][0]['links'][0]['href']
)
def test_get_versions_with_enable_host_header(self):
CONF.set_override('enable_host_header', True, 'service:api')
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://localhost/v2',
response.json['versions'][0]['links'][0]['href']
)
|
Add basic api version test coverage
|
Add basic api version test coverage
Change-Id: Idb04e81ce5954ca8b5e387dc7c0776fdf7d08779
|
Python
|
apache-2.0
|
openstack/designate,openstack/designate,openstack/designate
|
Add basic api version test coverage
Change-Id: Idb04e81ce5954ca8b5e387dc7c0776fdf7d08779
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_config import fixture as cfg_fixture
import oslotest.base
import webtest
from designate.api import versions
from designate.common import constants
CONF = cfg.CONF
class TestApiVersion(oslotest.base.BaseTestCase):
def setUp(self):
super(TestApiVersion, self).setUp()
self.useFixture(cfg_fixture.Config(CONF))
def test_add_a_version(self):
api_url = 'http://localhost/v2'
results = []
versions._add_a_version(
results, 'v2.1', api_url, constants.EXPERIMENTAL,
'2022-08-10T00:00:00Z')
self.assertEqual(1, len(results))
self.assertEqual('v2.1', results[0]['id'])
self.assertEqual(constants.EXPERIMENTAL, results[0]['status'])
self.assertEqual('2022-08-10T00:00:00Z', results[0]['updated'])
self.assertEqual(2, len(results[0]['links']))
def test_get_versions(self):
CONF.set_override('enable_host_header', False, 'service:api')
CONF.set_override(
'api_base_uri', 'http://127.0.0.2:9001/', 'service:api'
)
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://127.0.0.2:9001/v2',
response.json['versions'][0]['links'][0]['href']
)
def test_get_versions_with_enable_host_header(self):
CONF.set_override('enable_host_header', True, 'service:api')
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://localhost/v2',
response.json['versions'][0]['links'][0]['href']
)
|
<commit_before><commit_msg>Add basic api version test coverage
Change-Id: Idb04e81ce5954ca8b5e387dc7c0776fdf7d08779<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_config import fixture as cfg_fixture
import oslotest.base
import webtest
from designate.api import versions
from designate.common import constants
CONF = cfg.CONF
class TestApiVersion(oslotest.base.BaseTestCase):
def setUp(self):
super(TestApiVersion, self).setUp()
self.useFixture(cfg_fixture.Config(CONF))
def test_add_a_version(self):
api_url = 'http://localhost/v2'
results = []
versions._add_a_version(
results, 'v2.1', api_url, constants.EXPERIMENTAL,
'2022-08-10T00:00:00Z')
self.assertEqual(1, len(results))
self.assertEqual('v2.1', results[0]['id'])
self.assertEqual(constants.EXPERIMENTAL, results[0]['status'])
self.assertEqual('2022-08-10T00:00:00Z', results[0]['updated'])
self.assertEqual(2, len(results[0]['links']))
def test_get_versions(self):
CONF.set_override('enable_host_header', False, 'service:api')
CONF.set_override(
'api_base_uri', 'http://127.0.0.2:9001/', 'service:api'
)
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://127.0.0.2:9001/v2',
response.json['versions'][0]['links'][0]['href']
)
def test_get_versions_with_enable_host_header(self):
CONF.set_override('enable_host_header', True, 'service:api')
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://localhost/v2',
response.json['versions'][0]['links'][0]['href']
)
|
Add basic api version test coverage
Change-Id: Idb04e81ce5954ca8b5e387dc7c0776fdf7d08779# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_config import fixture as cfg_fixture
import oslotest.base
import webtest
from designate.api import versions
from designate.common import constants
CONF = cfg.CONF
class TestApiVersion(oslotest.base.BaseTestCase):
def setUp(self):
super(TestApiVersion, self).setUp()
self.useFixture(cfg_fixture.Config(CONF))
def test_add_a_version(self):
api_url = 'http://localhost/v2'
results = []
versions._add_a_version(
results, 'v2.1', api_url, constants.EXPERIMENTAL,
'2022-08-10T00:00:00Z')
self.assertEqual(1, len(results))
self.assertEqual('v2.1', results[0]['id'])
self.assertEqual(constants.EXPERIMENTAL, results[0]['status'])
self.assertEqual('2022-08-10T00:00:00Z', results[0]['updated'])
self.assertEqual(2, len(results[0]['links']))
def test_get_versions(self):
CONF.set_override('enable_host_header', False, 'service:api')
CONF.set_override(
'api_base_uri', 'http://127.0.0.2:9001/', 'service:api'
)
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://127.0.0.2:9001/v2',
response.json['versions'][0]['links'][0]['href']
)
def test_get_versions_with_enable_host_header(self):
CONF.set_override('enable_host_header', True, 'service:api')
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://localhost/v2',
response.json['versions'][0]['links'][0]['href']
)
|
<commit_before><commit_msg>Add basic api version test coverage
Change-Id: Idb04e81ce5954ca8b5e387dc7c0776fdf7d08779<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_config import fixture as cfg_fixture
import oslotest.base
import webtest
from designate.api import versions
from designate.common import constants
CONF = cfg.CONF
class TestApiVersion(oslotest.base.BaseTestCase):
def setUp(self):
super(TestApiVersion, self).setUp()
self.useFixture(cfg_fixture.Config(CONF))
def test_add_a_version(self):
api_url = 'http://localhost/v2'
results = []
versions._add_a_version(
results, 'v2.1', api_url, constants.EXPERIMENTAL,
'2022-08-10T00:00:00Z')
self.assertEqual(1, len(results))
self.assertEqual('v2.1', results[0]['id'])
self.assertEqual(constants.EXPERIMENTAL, results[0]['status'])
self.assertEqual('2022-08-10T00:00:00Z', results[0]['updated'])
self.assertEqual(2, len(results[0]['links']))
def test_get_versions(self):
CONF.set_override('enable_host_header', False, 'service:api')
CONF.set_override(
'api_base_uri', 'http://127.0.0.2:9001/', 'service:api'
)
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://127.0.0.2:9001/v2',
response.json['versions'][0]['links'][0]['href']
)
def test_get_versions_with_enable_host_header(self):
CONF.set_override('enable_host_header', True, 'service:api')
self.app = versions.factory({})
self.client = webtest.TestApp(self.app)
response = self.client.get('/')
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual(2, len(response.json['versions']))
self.assertEqual(
'http://localhost/v2',
response.json['versions'][0]['links'][0]['href']
)
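For readers unfamiliar with the helper under test, here is a stand-alone sketch of exercising _add_a_version outside the test class; the signature and constant mirror the calls above, the concrete values are illustrative:
from designate.api import versions
from designate.common import constants

results = []
# The helper appends one version descriptor to `results` in place.
versions._add_a_version(
    results, 'v2.1', 'http://127.0.0.1:9001/v2',
    constants.EXPERIMENTAL, '2022-08-10T00:00:00Z')
print(results[0]['id'], results[0]['status'])  # v2.1 EXPERIMENTAL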
|
|
964895852bb9acbe866c1a7bc7ba98b972100fa8
|
readthedocs/builds/migrations/0014_migrate-doctype-from-project-to-version.py
|
readthedocs/builds/migrations/0014_migrate-doctype-from-project-to-version.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-14 17:40
from django.db import migrations
from django.db.models import OuterRef, Subquery
def forwards_func(apps, schema_editor):
"""Migrate ``Project.documentation_type`` to ``Version.documentation_type``."""
Version = apps.get_model('builds', 'Version')
Project = apps.get_model('projects', 'Project')
Version.objects.all().update(
documentation_type=Subquery(
Project.objects
.filter(pk=OuterRef('project'))
.values('documentation_type')[:1]
),
)
class Migration(migrations.Migration):
dependencies = [
('builds', '0013_version_documentation_type'),
]
operations = [
migrations.RunPython(forwards_func),
]
|
Migrate documentation_type from Project to Version
|
Migrate documentation_type from Project to Version
|
Python
|
mit
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
Migrate documentation_type from Project to Version
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-14 17:40
from django.db import migrations
from django.db.models import OuterRef, Subquery
def forwards_func(apps, schema_editor):
"""Migrate ``Project.documentation_type`` to ``Version.documentation_type``."""
Version = apps.get_model('builds', 'Version')
Project = apps.get_model('projects', 'Project')
Version.objects.all().update(
documentation_type=Subquery(
Project.objects
.filter(pk=OuterRef('project'))
.values('documentation_type')[:1]
),
)
class Migration(migrations.Migration):
dependencies = [
('builds', '0013_version_documentation_type'),
]
operations = [
migrations.RunPython(forwards_func),
]
|
<commit_before><commit_msg>Migrate documentation_type from Project to Version<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-14 17:40
from django.db import migrations
from django.db.models import OuterRef, Subquery
def forwards_func(apps, schema_editor):
"""Migrate ``Project.documentation_type`` to ``Version.documentation_type``."""
Version = apps.get_model('builds', 'Version')
Project = apps.get_model('projects', 'Project')
Version.objects.all().update(
documentation_type=Subquery(
Project.objects
.filter(pk=OuterRef('project'))
.values('documentation_type')[:1]
),
)
class Migration(migrations.Migration):
dependencies = [
('builds', '0013_version_documentation_type'),
]
operations = [
migrations.RunPython(forwards_func),
]
|
Migrate documentation_type from Project to Version# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-14 17:40
from django.db import migrations
from django.db.models import OuterRef, Subquery
def forwards_func(apps, schema_editor):
"""Migrate ``Project.documentation_type`` to ``Version.documentation_type``."""
Version = apps.get_model('builds', 'Version')
Project = apps.get_model('projects', 'Project')
Version.objects.all().update(
documentation_type=Subquery(
Project.objects
.filter(pk=OuterRef('project'))
.values('documentation_type')[:1]
),
)
class Migration(migrations.Migration):
dependencies = [
('builds', '0013_version_documentation_type'),
]
operations = [
migrations.RunPython(forwards_func),
]
|
<commit_before><commit_msg>Migrate documentation_type from Project to Version<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-14 17:40
from django.db import migrations
from django.db.models import OuterRef, Subquery
def forwards_func(apps, schema_editor):
"""Migrate ``Project.documentation_type`` to ``Version.documentation_type``."""
Version = apps.get_model('builds', 'Version')
Project = apps.get_model('projects', 'Project')
Version.objects.all().update(
documentation_type=Subquery(
Project.objects
.filter(pk=OuterRef('project'))
.values('documentation_type')[:1]
),
)
class Migration(migrations.Migration):
dependencies = [
('builds', '0013_version_documentation_type'),
]
operations = [
migrations.RunPython(forwards_func),
]
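The Subquery update above copies the field in a single SQL UPDATE. A hedged sketch of the per-row alternative it avoids, as it would read inside forwards_func (illustrative only, not part of the commit):
# Row-by-row version: one UPDATE per Version object.
for version in Version.objects.select_related('project').iterator():
    version.documentation_type = version.project.documentation_type
    version.save(update_fields=['documentation_type'])
# The Subquery form issues a single statement instead, which matters
# when the builds_version table is large.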
|
|
adca0c2ff48b716e9ac3ef706d66d86943fb29b7
|
cpm_data/migrations/0015_add_jury_members_to_seasons_12_14.py
|
cpm_data/migrations/0015_add_jury_members_to_seasons_12_14.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from django.db import migrations
def add_to_season(apps, schema_editor, season_name, jury_names):
JuryMember = apps.get_model('cpm_data.JuryMember')
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
for name in jury_names:
try:
jury_member = JuryMember.objects.get(name_en=name)
except JuryMember.DoesNotExist:
raise
SeasonJuryMember.objects.get_or_create(season=season,
jury_member=jury_member)
def remove_from_season(apps, schema_editor, season_name, jury_names):
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
season_jury_members = SeasonJuryMember.objects.filter(
season=season, jury_member__name_en__in=jury_names
)
season_jury_members.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0014_remove_jury_duplicates'),
]
operations = [
migrations.RunPython(
partial(add_to_season,
season_name=season_name,
jury_names=jury_names),
partial(remove_from_season,
season_name=season_name,
jury_names=jury_names),
) for season_name, jury_names in [
('2012', ['Agricola de Cologne', 'Victor Aslyuk', 'Maciej Gil',
'Pavel Ivanov', 'Yuri Igrusha', 'Andrew Kureichik',
'Sergey Krasikov', 'Bohdana Smirnova', 'Cory McAbee']),
('2013', ['Volha Dashuk', 'Irina Demyanova', 'Anton Sidorenko',
'Pavel Ivanov', 'Yuri Igrusha', 'Sergey Krasikov',
'Bohdana Smirnova', 'Jon Rubin', 'Maciej Gil',
'Pierre-Luc Vaillancourt', 'Karsten Weber',
'Lee Sang-woo', 'Cory McAbee']),
('2014', ['Yuri Igrusha']), # more added in previous migrations
]
]
|
Add jury to 2012-2014 - take the data from results pages
|
Add jury to 2012-2014 - take the data from results pages
|
Python
|
unlicense
|
kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by
|
Add jury to 2012-2014 - take the data from results pages
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from django.db import migrations
def add_to_season(apps, schema_editor, season_name, jury_names):
JuryMember = apps.get_model('cpm_data.JuryMember')
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
for name in jury_names:
try:
jury_member = JuryMember.objects.get(name_en=name)
except JuryMember.DoesNotExist:
raise
SeasonJuryMember.objects.get_or_create(season=season,
jury_member=jury_member)
def remove_from_season(apps, schema_editor, season_name, jury_names):
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
season_jury_members = SeasonJuryMember.objects.filter(
season=season, jury_member__name_en__in=jury_names
)
season_jury_members.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0014_remove_jury_duplicates'),
]
operations = [
migrations.RunPython(
partial(add_to_season,
season_name=season_name,
jury_names=jury_names),
partial(remove_from_season,
season_name=season_name,
jury_names=jury_names),
) for season_name, jury_names in [
('2012', ['Agricola de Cologne', 'Victor Aslyuk', 'Maciej Gil',
'Pavel Ivanov', 'Yuri Igrusha', 'Andrew Kureichik',
'Sergey Krasikov', 'Bohdana Smirnova', 'Cory McAbee']),
('2013', ['Volha Dashuk', 'Irina Demyanova', 'Anton Sidorenko',
'Pavel Ivanov', 'Yuri Igrusha', 'Sergey Krasikov',
'Bohdana Smirnova', 'Jon Rubin', 'Maciej Gil',
'Pierre-Luc Vaillancourt', 'Karsten Weber',
'Lee Sang-woo', 'Cory McAbee']),
('2014', ['Yuri Igrusha']), # more added in previous migrations
]
]
|
<commit_before><commit_msg>Add jury to 2012-2014 - take the data from results pages<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from django.db import migrations
def add_to_season(apps, schema_editor, season_name, jury_names):
JuryMember = apps.get_model('cpm_data.JuryMember')
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
for name in jury_names:
try:
jury_member = JuryMember.objects.get(name_en=name)
except JuryMember.DoesNotExist:
raise
SeasonJuryMember.objects.get_or_create(season=season,
jury_member=jury_member)
def remove_from_season(apps, schema_editor, season_name, jury_names):
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
season_jury_members = SeasonJuryMember.objects.filter(
season=season, jury_member__name_en__in=jury_names
)
season_jury_members.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0014_remove_jury_duplicates'),
]
operations = [
migrations.RunPython(
partial(add_to_season,
season_name=season_name,
jury_names=jury_names),
partial(remove_from_season,
season_name=season_name,
jury_names=jury_names),
) for season_name, jury_names in [
('2012', ['Agricola de Cologne', 'Victor Aslyuk', 'Maciej Gil',
'Pavel Ivanov', 'Yuri Igrusha', 'Andrew Kureichik',
'Sergey Krasikov', 'Bohdana Smirnova', 'Cory McAbee']),
('2013', ['Volha Dashuk', 'Irina Demyanova', 'Anton Sidorenko',
'Pavel Ivanov', 'Yuri Igrusha', 'Sergey Krasikov',
'Bohdana Smirnova', 'Jon Rubin', 'Maciej Gil',
'Pierre-Luc Vaillancourt', 'Karsten Weber',
'Lee Sang-woo', 'Cory McAbee']),
('2014', ['Yuri Igrusha']), # more added in previous migrations
]
]
|
Add jury to 2012-2014 - take the data from results pages# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from django.db import migrations
def add_to_season(apps, schema_editor, season_name, jury_names):
JuryMember = apps.get_model('cpm_data.JuryMember')
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
for name in jury_names:
try:
jury_member = JuryMember.objects.get(name_en=name)
except JuryMember.DoesNotExist:
raise
SeasonJuryMember.objects.get_or_create(season=season,
jury_member=jury_member)
def remove_from_season(apps, schema_editor, season_name, jury_names):
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
season_jury_members = SeasonJuryMember.objects.filter(
season=season, jury_member__name_en__in=jury_names
)
season_jury_members.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0014_remove_jury_duplicates'),
]
operations = [
migrations.RunPython(
partial(add_to_season,
season_name=season_name,
jury_names=jury_names),
partial(remove_from_season,
season_name=season_name,
jury_names=jury_names),
) for season_name, jury_names in [
('2012', ['Agricola de Cologne', 'Victor Aslyuk', 'Maciej Gil',
'Pavel Ivanov', 'Yuri Igrusha', 'Andrew Kureichik',
'Sergey Krasikov', 'Bohdana Smirnova', 'Cory McAbee']),
('2013', ['Volha Dashuk', 'Irina Demyanova', 'Anton Sidorenko',
'Pavel Ivanov', 'Yuri Igrusha', 'Sergey Krasikov',
'Bohdana Smirnova', 'Jon Rubin', 'Maciej Gil',
'Pierre-Luc Vaillancourt', 'Karsten Weber',
'Lee Sang-woo', 'Cory McAbee']),
('2014', ['Yuri Igrusha']), # more added in previous migrations
]
]
|
<commit_before><commit_msg>Add jury to 2012-2014 - take the data from results pages<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from django.db import migrations
def add_to_season(apps, schema_editor, season_name, jury_names):
JuryMember = apps.get_model('cpm_data.JuryMember')
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
for name in jury_names:
try:
jury_member = JuryMember.objects.get(name_en=name)
except JuryMember.DoesNotExist:
raise
SeasonJuryMember.objects.get_or_create(season=season,
jury_member=jury_member)
def remove_from_season(apps, schema_editor, season_name, jury_names):
Season = apps.get_model('cpm_data.Season')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
season = Season.objects.get(name_en=season_name)
season_jury_members = SeasonJuryMember.objects.filter(
season=season, jury_member__name_en__in=jury_names
)
season_jury_members.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0014_remove_jury_duplicates'),
]
operations = [
migrations.RunPython(
partial(add_to_season,
season_name=season_name,
jury_names=jury_names),
partial(remove_from_season,
season_name=season_name,
jury_names=jury_names),
) for season_name, jury_names in [
('2012', ['Agricola de Cologne', 'Victor Aslyuk', 'Maciej Gil',
'Pavel Ivanov', 'Yuri Igrusha', 'Andrew Kureichik',
'Sergey Krasikov', 'Bohdana Smirnova', 'Cory McAbee']),
('2013', ['Volha Dashuk', 'Irina Demyanova', 'Anton Sidorenko',
'Pavel Ivanov', 'Yuri Igrusha', 'Sergey Krasikov',
'Bohdana Smirnova', 'Jon Rubin', 'Maciej Gil',
'Pierre-Luc Vaillancourt', 'Karsten Weber',
'Lee Sang-woo', 'Cory McAbee']),
('2014', ['Yuri Igrusha']), # more added in previous migrations
]
]
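migrations.RunPython calls its forward and reverse functions with exactly (apps, schema_editor), so functools.partial is used above to pre-bind the extra season/jury arguments. A self-contained illustration of that binding (names and data are illustrative):
from functools import partial

def add_to_season(apps, schema_editor, season_name, jury_names):
    print(season_name, jury_names)

# partial() fixes the keyword arguments, leaving the
# (apps, schema_editor) signature that RunPython expects.
forward = partial(add_to_season, season_name='2012', jury_names=['A', 'B'])
forward(None, None)  # prints: 2012 ['A', 'B']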
|
|
48a37b00bb73ea0398644e0eb22c50c34919b325
|
examples/non_terminating_checks_test.py
|
examples/non_terminating_checks_test.py
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_non_terminating_checks(self):
self.open('http://xkcd.com/993/')
self.wait_for_element('#comic')
self.check_assert_element('img[alt="Brand Identity"]')
self.check_assert_element('img[alt="Rocket Ship"]') # Will Fail
self.check_assert_element('#comicmap')
self.check_assert_text('Fake Item', '#middleContainer') # Will Fail
self.check_assert_text('Random', '#middleContainer')
self.check_assert_element('a[name="Super Fake !!!"]') # Will Fail
self.process_checks()
|
Add example test for non-terminating checks
|
Add example test for non-terminating checks
|
Python
|
mit
|
mdmintz/SeleniumBase,possoumous/Watchers,possoumous/Watchers,ktp420/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,ktp420/SeleniumBase,ktp420/SeleniumBase,possoumous/Watchers,mdmintz/seleniumspot,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,ktp420/SeleniumBase,possoumous/Watchers,mdmintz/SeleniumBase
|
Add example test for non-terminating checks
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_non_terminating_checks(self):
self.open('http://xkcd.com/993/')
self.wait_for_element('#comic')
self.check_assert_element('img[alt="Brand Identity"]')
self.check_assert_element('img[alt="Rocket Ship"]') # Will Fail
self.check_assert_element('#comicmap')
self.check_assert_text('Fake Item', '#middleContainer') # Will Fail
self.check_assert_text('Random', '#middleContainer')
self.check_assert_element('a[name="Super Fake !!!"]') # Will Fail
self.process_checks()
|
<commit_before><commit_msg>Add example test for non-terminating checks<commit_after>
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_non_terminating_checks(self):
self.open('http://xkcd.com/993/')
self.wait_for_element('#comic')
self.check_assert_element('img[alt="Brand Identity"]')
self.check_assert_element('img[alt="Rocket Ship"]') # Will Fail
self.check_assert_element('#comicmap')
self.check_assert_text('Fake Item', '#middleContainer') # Will Fail
self.check_assert_text('Random', '#middleContainer')
self.check_assert_element('a[name="Super Fake !!!"]') # Will Fail
self.process_checks()
|
Add example test for non-terminating checksfrom seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_non_terminating_checks(self):
self.open('http://xkcd.com/993/')
self.wait_for_element('#comic')
self.check_assert_element('img[alt="Brand Identity"]')
self.check_assert_element('img[alt="Rocket Ship"]') # Will Fail
self.check_assert_element('#comicmap')
self.check_assert_text('Fake Item', '#middleContainer') # Will Fail
self.check_assert_text('Random', '#middleContainer')
self.check_assert_element('a[name="Super Fake !!!"]') # Will Fail
self.process_checks()
|
<commit_before><commit_msg>Add example test for non-terminating checks<commit_after>from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_non_terminating_checks(self):
self.open('http://xkcd.com/993/')
self.wait_for_element('#comic')
self.check_assert_element('img[alt="Brand Identity"]')
self.check_assert_element('img[alt="Rocket Ship"]') # Will Fail
self.check_assert_element('#comicmap')
self.check_assert_text('Fake Item', '#middleContainer') # Will Fail
self.check_assert_text('Random', '#middleContainer')
self.check_assert_element('a[name="Super Fake !!!"]') # Will Fail
self.process_checks()
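The check_assert_* calls above record failures instead of raising, and process_checks() reports them all at once. A generic sketch of that deferred-assertion pattern in plain Python (SeleniumBase's own implementation differs; this only shows the idea):
class DeferredChecks:
    def __init__(self):
        self._failures = []

    def check(self, condition, message):
        # Record the failure but let the test keep running.
        if not condition:
            self._failures.append(message)

    def process_checks(self):
        # Raise once at the end, listing every recorded failure.
        if self._failures:
            raise AssertionError("%d check(s) failed:\n%s" % (
                len(self._failures), "\n".join(self._failures)))

checks = DeferredChecks()
checks.check(1 + 1 == 2, "math works")
checks.check(False, "this one fails")  # recorded, not raised
checks.process_checks()  # raises AssertionError listing one failure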
|
|
6e02077b0aab178a2d8100a26811a485a74852f2
|
flexget/plugins/urlrewrite_shortened.py
|
flexget/plugins/urlrewrite_shortened.py
|
from __future__ import unicode_literals, division, absolute_import
import logging
from urlparse import urlparse
from flexget import plugin
from flexget.utils import requests
from flexget.event import event
log = logging.getLogger('shortened')
class UrlRewriteShortened(object):
"""Shortened url rewriter."""
def url_rewritable(self, task, entry):
return urlparse(entry['url']).netloc in ['bit.ly', 't.co']
def url_rewrite(self, task, entry):
request = task.requests.head(entry['url'], allow_redirects=True)
entry['url'] = request.url
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteShortened, 'shortened', groups=['urlrewriter'], api_ver=2)
|
Add url rewriter for shortened urls
|
Add url rewriter for shortened urls
|
Python
|
mit
|
ratoaq2/Flexget,gazpachoking/Flexget,qvazzler/Flexget,Flexget/Flexget,X-dark/Flexget,poulpito/Flexget,LynxyssCZ/Flexget,offbyone/Flexget,poulpito/Flexget,X-dark/Flexget,sean797/Flexget,malkavi/Flexget,tsnoam/Flexget,JorisDeRieck/Flexget,Pretagonist/Flexget,jawilson/Flexget,OmgOhnoes/Flexget,Pretagonist/Flexget,qvazzler/Flexget,grrr2/Flexget,thalamus/Flexget,jacobmetrick/Flexget,oxc/Flexget,voriux/Flexget,Pretagonist/Flexget,tarzasai/Flexget,OmgOhnoes/Flexget,patsissons/Flexget,LynxyssCZ/Flexget,Flexget/Flexget,cvium/Flexget,voriux/Flexget,thalamus/Flexget,jawilson/Flexget,ZefQ/Flexget,offbyone/Flexget,ianstalk/Flexget,crawln45/Flexget,OmgOhnoes/Flexget,antivirtel/Flexget,ibrahimkarahan/Flexget,sean797/Flexget,drwyrm/Flexget,drwyrm/Flexget,qk4l/Flexget,v17al/Flexget,xfouloux/Flexget,patsissons/Flexget,tvcsantos/Flexget,vfrc2/Flexget,tsnoam/Flexget,antivirtel/Flexget,dsemi/Flexget,Flexget/Flexget,ratoaq2/Flexget,tvcsantos/Flexget,poulpito/Flexget,grrr2/Flexget,spencerjanssen/Flexget,oxc/Flexget,crawln45/Flexget,Danfocus/Flexget,qk4l/Flexget,tobinjt/Flexget,malkavi/Flexget,crawln45/Flexget,oxc/Flexget,lildadou/Flexget,vfrc2/Flexget,jacobmetrick/Flexget,xfouloux/Flexget,xfouloux/Flexget,tobinjt/Flexget,ratoaq2/Flexget,tobinjt/Flexget,ianstalk/Flexget,gazpachoking/Flexget,lildadou/Flexget,JorisDeRieck/Flexget,Danfocus/Flexget,qk4l/Flexget,jawilson/Flexget,drwyrm/Flexget,tsnoam/Flexget,X-dark/Flexget,vfrc2/Flexget,tarzasai/Flexget,LynxyssCZ/Flexget,ZefQ/Flexget,malkavi/Flexget,v17al/Flexget,Danfocus/Flexget,LynxyssCZ/Flexget,antivirtel/Flexget,grrr2/Flexget,dsemi/Flexget,v17al/Flexget,crawln45/Flexget,jawilson/Flexget,ibrahimkarahan/Flexget,spencerjanssen/Flexget,spencerjanssen/Flexget,cvium/Flexget,patsissons/Flexget,Flexget/Flexget,cvium/Flexget,tarzasai/Flexget,ianstalk/Flexget,ZefQ/Flexget,sean797/Flexget,JorisDeRieck/Flexget,thalamus/Flexget,malkavi/Flexget,JorisDeRieck/Flexget,Danfocus/Flexget,ibrahimkarahan/Flexget,qvazzler/Flexget,lildadou/Flexget,camon/Flexget,tobinjt/Flexget,dsemi/Flexget,camon/Flexget,offbyone/Flexget,jacobmetrick/Flexget
|
Add url rewriter for shortened urls
|
from __future__ import unicode_literals, division, absolute_import
import logging
from urlparse import urlparse
from flexget import plugin
from flexget.utils import requests
from flexget.event import event
log = logging.getLogger('shortened')
class UrlRewriteShortened(object):
"""Shortened url rewriter."""
def url_rewritable(self, task, entry):
return urlparse(entry['url']).netloc in ['bit.ly', 't.co']
def url_rewrite(self, task, entry):
request = task.requests.head(entry['url'], allow_redirects=True)
entry['url'] = request.url
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteShortened, 'shortened', groups=['urlrewriter'], api_ver=2)
|
<commit_before><commit_msg>Add url rewriter for shortened urls<commit_after>
|
from __future__ import unicode_literals, division, absolute_import
import logging
from urlparse import urlparse
from flexget import plugin
from flexget.utils import requests
from flexget.event import event
log = logging.getLogger('shortened')
class UrlRewriteShortened(object):
"""Shortened url rewriter."""
def url_rewritable(self, task, entry):
return urlparse(entry['url']).netloc in ['bit.ly', 't.co']
def url_rewrite(self, task, entry):
request = task.requests.head(entry['url'], allow_redirects=True)
entry['url'] = request.url
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteShortened, 'shortened', groups=['urlrewriter'], api_ver=2)
|
Add url rewriter for shortened urlsfrom __future__ import unicode_literals, division, absolute_import
import logging
from urlparse import urlparse
from flexget import plugin
from flexget.utils import requests
from flexget.event import event
log = logging.getLogger('shortened')
class UrlRewriteShortened(object):
"""Shortened url rewriter."""
def url_rewritable(self, task, entry):
return urlparse(entry['url']).netloc in ['bit.ly', 't.co']
def url_rewrite(self, task, entry):
request = task.requests.head(entry['url'], allow_redirects=True)
entry['url'] = request.url
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteShortened, 'shortened', groups=['urlrewriter'], api_ver=2)
|
<commit_before><commit_msg>Add url rewriter for shortened urls<commit_after>from __future__ import unicode_literals, division, absolute_import
import logging
from urlparse import urlparse
from flexget import plugin
from flexget.utils import requests
from flexget.event import event
log = logging.getLogger('shortened')
class UrlRewriteShortened(object):
"""Shortened url rewriter."""
def url_rewritable(self, task, entry):
return urlparse(entry['url']).netloc in ['bit.ly', 't.co']
def url_rewrite(self, task, entry):
request = task.requests.head(entry['url'], allow_redirects=True)
entry['url'] = request.url
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteShortened, 'shortened', groups=['urlrewriter'], api_ver=2)
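The rewrite amounts to following the shortener's redirect chain with a HEAD request. A stand-alone sketch using the plain requests library (the plugin itself goes through the task's wrapped session):
import requests

def resolve(url):
    # HEAD avoids downloading the body; allow_redirects follows the
    # 301/302 chain to the final destination URL.
    response = requests.head(url, allow_redirects=True, timeout=10)
    return response.url

# resolve('http://bit.ly/example') -> the expanded target URL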
|
|
2ffcae174d7ff10aa816b8fb839c9c0ae6fb3689
|
bidwire/alembic/versions/a1b42c9006a7_absolute_massgov_eopss_url.py
|
bidwire/alembic/versions/a1b42c9006a7_absolute_massgov_eopss_url.py
|
"""absolute_massgov_eopss_url
Revision ID: a1b42c9006a7
Revises: 9b30b0fe231a
Create Date: 2017-06-26 00:02:45.998655
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm.session import Session
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from document import Document
from urllib import parse
# revision identifiers, used by Alembic.
revision = 'a1b42c9006a7'
down_revision = '9b30b0fe231a'
branch_labels = None
depends_on = None
def upgrade():
def ensure_absolute(url):
root_url = "https://www.mass.gov/"
if not url.startswith(root_url):
return parse.urljoin(root_url, url)
return url
# Attach to the migration's session
session = Session(bind=op.get_bind())
docs = session.query(Document).filter(
Document.site == Document.Site.MASSGOV_EOPSS.name).all()
for doc in docs:
doc.url = ensure_absolute(doc.url)
session.add_all(docs)
session.commit()
def downgrade():
# Do nothing for the rollback.
pass
|
Add database migration to make all Massgov Eopss URLs absolute.
|
Add database migration to make all Massgov Eopss URLs absolute.
|
Python
|
mit
|
RagtagOpen/bidwire,RagtagOpen/bidwire,RagtagOpen/bidwire
|
Add database migration to make all Massgov Eopss URLs absolute.
|
"""absolute_massgov_eopss_url
Revision ID: a1b42c9006a7
Revises: 9b30b0fe231a
Create Date: 2017-06-26 00:02:45.998655
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm.session import Session
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from document import Document
from urllib import parse
# revision identifiers, used by Alembic.
revision = 'a1b42c9006a7'
down_revision = '9b30b0fe231a'
branch_labels = None
depends_on = None
def upgrade():
def ensure_absolute(url):
root_url = "https://www.mass.gov/"
if not url.startswith(root_url):
return parse.urljoin(root_url, url)
return url
# Attach to the migration's session
session = Session(bind=op.get_bind())
docs = session.query(Document).filter(
Document.site == Document.Site.MASSGOV_EOPSS.name).all()
for doc in docs:
doc.url = ensure_absolute(doc.url)
session.add_all(docs)
session.commit()
def downgrade():
# Do nothing for the rollback.
pass
|
<commit_before><commit_msg>Add database migration to make all Massgov Eopss URLs absolute.<commit_after>
|
"""absolute_massgov_eopss_url
Revision ID: a1b42c9006a7
Revises: 9b30b0fe231a
Create Date: 2017-06-26 00:02:45.998655
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm.session import Session
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from document import Document
from urllib import parse
# revision identifiers, used by Alembic.
revision = 'a1b42c9006a7'
down_revision = '9b30b0fe231a'
branch_labels = None
depends_on = None
def upgrade():
def ensure_absolute(url):
root_url = "https://www.mass.gov/"
if not url.startswith(root_url):
return parse.urljoin(root_url, url)
return url
# Attach to the migration's session
session = Session(bind=op.get_bind())
docs = session.query(Document).filter(
Document.site == Document.Site.MASSGOV_EOPSS.name).all()
for doc in docs:
doc.url = ensure_absolute(doc.url)
session.add_all(docs)
session.commit()
def downgrade():
# Do nothing for the rollback.
pass
|
Add database migration to make all Massgov Eopss URLs absolute."""absolute_massgov_eopss_url
Revision ID: a1b42c9006a7
Revises: 9b30b0fe231a
Create Date: 2017-06-26 00:02:45.998655
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm.session import Session
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from document import Document
from urllib import parse
# revision identifiers, used by Alembic.
revision = 'a1b42c9006a7'
down_revision = '9b30b0fe231a'
branch_labels = None
depends_on = None
def upgrade():
def ensure_absolute(url):
root_url = "https://www.mass.gov/"
if not url.startswith(root_url):
return parse.urljoin(root_url, url)
return url
# Attach to the migration's session
session = Session(bind=op.get_bind())
docs = session.query(Document).filter(
Document.site == Document.Site.MASSGOV_EOPSS.name).all()
for doc in docs:
doc.url = ensure_absolute(doc.url)
session.add_all(docs)
session.commit()
def downgrade():
# Do nothing for the rollback.
pass
|
<commit_before><commit_msg>Add database migration to make all Massgov Eopss URLs absolute.<commit_after>"""absolute_massgov_eopss_url
Revision ID: a1b42c9006a7
Revises: 9b30b0fe231a
Create Date: 2017-06-26 00:02:45.998655
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm.session import Session
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from document import Document
from urllib import parse
# revision identifiers, used by Alembic.
revision = 'a1b42c9006a7'
down_revision = '9b30b0fe231a'
branch_labels = None
depends_on = None
def upgrade():
def ensure_absolute(url):
root_url = "https://www.mass.gov/"
if not url.startswith(root_url):
return parse.urljoin(root_url, url)
return url
# Attach to the migration's session
session = Session(bind=op.get_bind())
docs = session.query(Document).filter(
Document.site == Document.Site.MASSGOV_EOPSS.name).all()
for doc in docs:
doc.url = ensure_absolute(doc.url)
session.add_all(docs)
session.commit()
def downgrade():
# Do nothing for the rollback.
pass
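The migration leans on urllib.parse.urljoin resolving relative URLs against the root while leaving absolute ones untouched. A quick standard-library illustration:
from urllib import parse

root = "https://www.mass.gov/"
# Relative path: joined onto the root.
print(parse.urljoin(root, "eopss/docs/grant.pdf"))
# -> https://www.mass.gov/eopss/docs/grant.pdf
# Already absolute: returned unchanged.
print(parse.urljoin(root, "https://www.mass.gov/eopss/docs/grant.pdf"))
# -> https://www.mass.gov/eopss/docs/grant.pdf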
|
|
cfca3ecf6d36611ff0ffe3537f01c2711e94f87e
|
kpub/tests/test_counts.py
|
kpub/tests/test_counts.py
|
import kpub
def test_annual_count():
# Does the cumulative count match the annual count?
db = kpub.PublicationDB()
annual = db.get_annual_publication_count()
cumul = db.get_annual_publication_count_cumulative()
assert annual['k2'][2010] == 0 # K2 didn't exist in 2010
# The first K2 papers started appearing in 2014; the cumulative counts should reflect that:
assert (annual['k2'][2014] + annual['k2'][2015]) == cumul['k2'][2015]
assert (annual['k2'][2014] + annual['k2'][2015] + annual['k2'][2016]) == cumul['k2'][2016]
|
Add unit test for get_annual_publication_count
|
Add unit test for get_annual_publication_count
|
Python
|
mit
|
KeplerGO/kpub
|
Add unit test for get_annual_publication_count
|
import kpub
def test_annual_count():
# Does the cumulative count match the annual count?
db = kpub.PublicationDB()
annual = db.get_annual_publication_count()
cumul = db.get_annual_publication_count_cumulative()
assert annual['k2'][2010] == 0 # K2 didn't exist in 2010
# The first K2 papers started appearing in 2014; the cumulative counts should reflect that:
assert (annual['k2'][2014] + annual['k2'][2015]) == cumul['k2'][2015]
assert (annual['k2'][2014] + annual['k2'][2015] + annual['k2'][2016]) == cumul['k2'][2016]
|
<commit_before><commit_msg>Add unit test for get_annual_publication_count<commit_after>
|
import kpub
def test_annual_count():
# Does the cumulative count match the annual count?
db = kpub.PublicationDB()
annual = db.get_annual_publication_count()
cumul = db.get_annual_publication_count_cumulative()
assert annual['k2'][2010] == 0 # K2 didn't exist in 2010
# The first K2 papers started appearing in 2014; the cumulative counts should reflect that:
assert (annual['k2'][2014] + annual['k2'][2015]) == cumul['k2'][2015]
assert (annual['k2'][2014] + annual['k2'][2015] + annual['k2'][2016]) == cumul['k2'][2016]
|
Add unit test for get_annual_publication_countimport kpub
def test_annual_count():
# Does the cumulative count match the annual count?
db = kpub.PublicationDB()
annual = db.get_annual_publication_count()
cumul = db.get_annual_publication_count_cumulative()
assert annual['k2'][2010] == 0 # K2 didn't exist in 2010
# The first K2 papers started appearing in 2014; the cumulative counts should reflect that:
assert (annual['k2'][2014] + annual['k2'][2015]) == cumul['k2'][2015]
assert (annual['k2'][2014] + annual['k2'][2015] + annual['k2'][2016]) == cumul['k2'][2016]
|
<commit_before><commit_msg>Add unit test for get_annual_publication_count<commit_after>import kpub
def test_annual_count():
# Does the cumulative count match the annual count?
db = kpub.PublicationDB()
annual = db.get_annual_publication_count()
cumul = db.get_annual_publication_count_cumulative()
assert annual['k2'][2010] == 0 # K2 didn't exist in 2010
# The first K2 papers started appearing in 2014; the cumulative counts should reflect that:
assert (annual['k2'][2014] + annual['k2'][2015]) == cumul['k2'][2015]
assert (annual['k2'][2014] + annual['k2'][2015] + annual['k2'][2016]) == cumul['k2'][2016]
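The invariant under test, that cumulative counts are running sums of the annual counts, can be computed directly. A small sketch with itertools.accumulate (illustrative data, not kpub's API):
from itertools import accumulate

annual = {2014: 5, 2015: 12, 2016: 30}  # papers per year
years = sorted(annual)
cumulative = dict(zip(years, accumulate(annual[y] for y in years)))
print(cumulative)  # {2014: 5, 2015: 17, 2016: 47}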
|
|
d0b98a062eaca03dba53f610bb4b326d769464e9
|
foundation/organisation/migrations/0008_auto_20160707_0752.py
|
foundation/organisation/migrations/0008_auto_20160707_0752.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organisation', '0007_add_old_project_bool_to_project_model'),
]
operations = [
migrations.AlterField(
model_name='networkgroupmembership',
name='order',
field=models.IntegerField(help_text=b'The lower the number the higher on the page this Person will be shown.', null=True, blank=True),
),
]
|
Add updated help text for order in networkgroup
|
Add updated help text for order in networkgroup
|
Python
|
mit
|
okfn/website,okfn/foundation,okfn/website,okfn/website,okfn/foundation,okfn/foundation,okfn/foundation,okfn/website
|
Add updated help text for order in networkgroup
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organisation', '0007_add_old_project_bool_to_project_model'),
]
operations = [
migrations.AlterField(
model_name='networkgroupmembership',
name='order',
field=models.IntegerField(help_text=b'The lower the number the higher on the page this Person will be shown.', null=True, blank=True),
),
]
|
<commit_before><commit_msg>Add updated help text for order in networkgroup<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organisation', '0007_add_old_project_bool_to_project_model'),
]
operations = [
migrations.AlterField(
model_name='networkgroupmembership',
name='order',
field=models.IntegerField(help_text=b'The lower the number the higher on the page this Person will be shown.', null=True, blank=True),
),
]
|
Add updated help text for order in networkgroup# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organisation', '0007_add_old_project_bool_to_project_model'),
]
operations = [
migrations.AlterField(
model_name='networkgroupmembership',
name='order',
field=models.IntegerField(help_text=b'The lower the number the higher on the page this Person will be shown.', null=True, blank=True),
),
]
|
<commit_before><commit_msg>Add updated help text for order in networkgroup<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organisation', '0007_add_old_project_bool_to_project_model'),
]
operations = [
migrations.AlterField(
model_name='networkgroupmembership',
name='order',
field=models.IntegerField(help_text=b'The lower the number the higher on the page this Person will be shown.', null=True, blank=True),
),
]
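For context, an AlterField like this is what makemigrations emits when only a field option such as help_text changes. A hedged sketch of the corresponding model-side edit (the model and field names mirror the migration; everything else about the model is assumed, and the b'' prefix in the migration is a Python 2 artifact):
from django.db import models

class NetworkGroupMembership(models.Model):
    order = models.IntegerField(
        null=True, blank=True,
        help_text='The lower the number the higher on the page '
                  'this Person will be shown.')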
|
|
3075a2dfcd51b932fd142e51e6bdc1694ab4e750
|
gitlabform/gitlabform/test/test_tags.py
|
gitlabform/gitlabform/test/test_tags.py
|
from gitlabform.gitlabform.test import (
run_gitlabform,
)
class TestArchiveProject:
def test__archive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
config = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(config, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
def test__unarchive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
unarchive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: false
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
run_gitlabform(unarchive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is False
def test__dont_edit_archived_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
edit_archived_project = f"""
# the project has to be configured as archived
# for other configs for it to be ignored
projects_and_groups:
{group_and_project}:
project:
archive: true
{group}/*:
files:
README.md:
overwrite: true
branches:
- main
content: |
                Some other content than the default one
"""
run_gitlabform(edit_archived_project, group_and_project)
# the fact that we are not getting an exception because of trying to edit
# an archived project means that the test is passing
|
Add tests for protected tags
|
Add tests for protected tags
|
Python
|
mit
|
egnyte/gitlabform,egnyte/gitlabform
|
Add tests for protected tags
|
from gitlabform.gitlabform.test import (
run_gitlabform,
)
class TestArchiveProject:
def test__archive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
config = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(config, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
def test__unarchive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
unarchive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: false
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
run_gitlabform(unarchive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is False
def test__dont_edit_archived_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
edit_archived_project = f"""
# the project has to be configured as archived
# for other configs for it to be ignored
projects_and_groups:
{group_and_project}:
project:
archive: true
{group}/*:
files:
README.md:
overwrite: true
branches:
- main
content: |
                Some other content than the default one
"""
run_gitlabform(edit_archived_project, group_and_project)
# the fact that we are not getting an exception because of trying to edit
# an archived project means that the test is passing
|
<commit_before><commit_msg>Add tests for protected tags<commit_after>
|
from gitlabform.gitlabform.test import (
run_gitlabform,
)
class TestArchiveProject:
def test__archive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
config = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(config, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
def test__unarchive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
unarchive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: false
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
run_gitlabform(unarchive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is False
def test__dont_edit_archived_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
edit_archived_project = f"""
# the project has to be configured as archived
# for other configs for it to be ignored
projects_and_groups:
{group_and_project}:
project:
archive: true
{group}/*:
files:
README.md:
overwrite: true
branches:
- main
content: |
                Some other content than the default one
"""
run_gitlabform(edit_archived_project, group_and_project)
# the fact that we are not getting an exception because of trying to edit
# an archived project means that the test is passing
|
Add tests for protected tagsfrom gitlabform.gitlabform.test import (
run_gitlabform,
)
class TestArchiveProject:
def test__archive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
config = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(config, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
def test__unarchive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
unarchive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: false
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
run_gitlabform(unarchive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is False
def test__dont_edit_archived_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
edit_archived_project = f"""
# the project has to be configured as archived
# for it to be ignored by the other configs
projects_and_groups:
{group_and_project}:
project:
archive: true
{group}/*:
files:
README.md:
overwrite: true
branches:
- main
content: |
Some other content than the default one
"""
run_gitlabform(edit_archived_project, group_and_project)
# the fact that we are not getting an exception when trying to edit
# an archived project means that the test is passing
|
<commit_before><commit_msg>Add tests for protected tags<commit_after>from gitlabform.gitlabform.test import (
run_gitlabform,
)
class TestArchiveProject:
def test__archive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
config = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(config, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
def test__unarchive_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
unarchive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: false
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
run_gitlabform(unarchive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is False
def test__dont_edit_archived_project(self, gitlab, group, project):
group_and_project = f"{group}/{project}"
archive_project = f"""
projects_and_groups:
{group_and_project}:
project:
archive: true
"""
run_gitlabform(archive_project, group_and_project)
project = gitlab.get_project(group_and_project)
assert project["archived"] is True
edit_archived_project = f"""
# the project has to be configured as archived
# for it to be ignored by the other configs
projects_and_groups:
{group_and_project}:
project:
archive: true
{group}/*:
files:
README.md:
overwrite: true
branches:
- main
content: |
Some other content than the default one
"""
run_gitlabform(edit_archived_project, group_and_project)
# the fact that we are not getting an exception when trying to edit
# an archived project means that the test is passing
|
|
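Aside: the archive tests above apply each YAML config once; a natural extension is to assert idempotency by applying the same config twice. The sketch below is not part of the record — it reuses run_gitlabform and the gitlab/group/project fixtures from the tests above, and the repeated-run assertion is an assumption about gitlabform's behaviour, not something the record verifies.

from gitlabform.gitlabform.test import (
    run_gitlabform,
)

def test__archive_project_is_idempotent(gitlab, group, project):
    group_and_project = f"{group}/{project}"
    config = f"""
    projects_and_groups:
      {group_and_project}:
        project:
          archive: true
    """
    # applying the same config twice should neither raise nor flip the state
    for _ in range(2):
        run_gitlabform(config, group_and_project)
    project = gitlab.get_project(group_and_project)
    assert project["archived"] is True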
1ac9175d661cc3d42416f3f55c5c8212467c612c
|
make_plots.py
|
make_plots.py
|
"""Make plots of the results of Dakotathon experiments."""
import numpy as np
import matplotlib.pyplot as mpl
def read_dat_header(dat_file):
try:
with open(dat_file, 'r') as fp:
names = fp.readline().split()
except IOError:
pass
else:
return names
def read_dat_file(dat_file):
names = read_dat_header(dat_file)
rnames = range(len(names))
rnames.pop(names.index('interface'))
return np.loadtxt(dat_file, skiprows=1, unpack=True, usecols=rnames)
def make_stacked_surface_plot():
pass
def make_pdf_and_cdf_plot():
pass
if __name__ == '__main__':
make_stacked_surface_plot()
make_pdf_and_cdf_plot()
|
Read Dakota tabular graphics output file
|
Read Dakota tabular graphics output file
|
Python
|
mit
|
mdpiper/AGU-2016
|
Read Dakota tabular graphics output file
|
"""Make plots of the results of Dakotathon experiments."""
import numpy as np
import matplotlib.pyplot as mpl
def read_dat_header(dat_file):
try:
with open(dat_file, 'r') as fp:
names = fp.readline().split()
except IOError:
pass
else:
return names
def read_dat_file(dat_file):
names = read_dat_header(dat_file)
rnames = range(len(names))
rnames.pop(names.index('interface'))
return np.loadtxt(dat_file, skiprows=1, unpack=True, usecols=rnames)
def make_stacked_surface_plot():
pass
def make_pdf_and_cdf_plot():
pass
if __name__ == '__main__':
make_stacked_surface_plot()
make_pdf_and_cdf_plot()
|
<commit_before><commit_msg>Read Dakota tabular graphics output file<commit_after>
|
"""Make plots of the results of Dakotathon experiments."""
import numpy as np
import matplotlib.pyplot as mpl
def read_dat_header(dat_file):
try:
with open(dat_file, 'r') as fp:
names = fp.readline().split()
except IOError:
pass
else:
return names
def read_dat_file(dat_file):
names = read_dat_header(dat_file)
rnames = range(len(names))
rnames.pop(names.index('interface'))
return np.loadtxt(dat_file, skiprows=1, unpack=True, usecols=rnames)
def make_stacked_surface_plot():
pass
def make_pdf_and_cdf_plot():
pass
if __name__ == '__main__':
make_stacked_surface_plot()
make_pdf_and_cdf_plot()
|
Read Dakota tabular graphics output file"""Make plots of the results of Dakotathon experiments."""
import numpy as np
import matplotlib.pyplot as mpl
def read_dat_header(dat_file):
try:
with open(dat_file, 'r') as fp:
names = fp.readline().split()
except IOError:
pass
else:
return names
def read_dat_file(dat_file):
names = read_dat_header(dat_file)
rnames = range(len(names))
rnames.pop(names.index('interface'))
return np.loadtxt(dat_file, skiprows=1, unpack=True, usecols=rnames)
def make_stacked_surface_plot():
pass
def make_pdf_and_cdf_plot():
pass
if __name__ == '__main__':
make_stacked_surface_plot()
make_pdf_and_cdf_plot()
|
<commit_before><commit_msg>Read Dakota tabular graphics output file<commit_after>"""Make plots of the results of Dakotathon experiments."""
import numpy as np
import matplotlib.pyplot as mpl
def read_dat_header(dat_file):
try:
with open(dat_file, 'r') as fp:
names = fp.readline().split()
except IOError:
pass
else:
return names
def read_dat_file(dat_file):
names = read_dat_header(dat_file)
rnames = range(len(names))
rnames.pop(names.index('interface'))
return np.loadtxt(dat_file, skiprows=1, unpack=True, usecols=rnames)
def make_stacked_surface_plot():
pass
def make_pdf_and_cdf_plot():
pass
if __name__ == '__main__':
make_stacked_surface_plot()
make_pdf_and_cdf_plot()
|
|
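Aside: the column-skipping trick in read_dat_file above — drop the textual 'interface' column so np.loadtxt sees only numbers — can be sketched on an in-memory file. Everything below is illustrative: the header and values mimic Dakota's tabular format but are made-up assumptions, not output from a real run.

import io
import numpy as np

# hypothetical tabular data in the shape the reader above expects
dat = io.StringIO(
    "%eval_id interface x1 response_fn_1\n"
    "1 NO_ID 0.5 1.25\n"
    "2 NO_ID 0.7 1.75\n"
)
names = dat.readline().split()
# keep every column except the textual 'interface' one
usecols = [i for i, n in enumerate(names) if n != "interface"]
dat.seek(0)
data = np.loadtxt(dat, skiprows=1, unpack=True, usecols=usecols)
numeric_names = [n for n in names if n != "interface"]
print(dict(zip(numeric_names, (col.tolist() for col in data))))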
38978ce807966a1367ae2ad95dc81f87a08c2ca3
|
pbm2sh.py
|
pbm2sh.py
|
import sys
import re
with open(sys.argv[1]) as f:
magic = f.readline().strip()
if magic != "P1":
print "Expected file to start with P1"
sys.exit(1)
f.readline() # Ignore comment line
w, h = [int(n) for n in f.readline().strip().split(" ")]
pixels = re.sub(r"[^01]", "", f.read())
lines = [pixels[i:i+w] for i in xrange(0, len(pixels), w)]
if len(lines) > 64:
print "Max 64 lines"
sys.exit(1)
lines.extend(["0"*w] * (64 - len(lines))) # Pad out to 64 lines
cols = w
if cols > 128:
print "Max 128 columns"
sys.exit(1)
for i in range(cols):
for byteoff in range(8):
bits = []
for bitoff in range(8):
row = lines[byteoff * 8 + bitoff]
bits.append(row[i])
print "write_data", " ".join(reversed(bits))
print ""
|
Add script to generate script from pbm files
|
Add script to generate script from pbm files
|
Python
|
mit
|
pib/gpio_ssd1306,pib/gpio_ssd1306
|
Add script to generate script from pbm files
|
import sys
import re
with open(sys.argv[1]) as f:
magic = f.readline().strip()
if magic != "P1":
print "Expected file to start with P1"
sys.exit(1)
f.readline() # Ignore comment line
w, h = [int(n) for n in f.readline().strip().split(" ")]
pixels = re.sub(r"[^01]", "", f.read())
lines = [pixels[i:i+w] for i in xrange(0, len(pixels), w)]
if len(lines) > 64:
print "Max 64 lines"
sys.exit(1)
lines.extend(["0"*w] * (64 - len(lines))) # Pad out to 64 lines
cols = w
if cols > 128:
print "Max 128 columns"
sys.exit(1)
for i in range(cols):
for byteoff in range(8):
bits = []
for bitoff in range(8):
row = lines[byteoff * 8 + bitoff]
bits.append(row[i])
print "write_data", " ".join(reversed(bits))
print ""
|
<commit_before><commit_msg>Add script to generate script from pbm files<commit_after>
|
import sys
import re
with open(sys.argv[1]) as f:
magic = f.readline().strip()
if magic != "P1":
print "Expected file to start with P1"
sys.exit(1)
f.readline() # Ignore comment line
w, h = [int(n) for n in f.readline().strip().split(" ")]
pixels = re.sub(r"[^01]", "", f.read())
lines = [pixels[i:i+w] for i in xrange(0, len(pixels), w)]
if len(lines) > 64:
print "Max 64 lines"
sys.exit(1)
lines.extend(["0"*w] * (64 - len(lines))) # Pad out to 64 lines
cols = w
if cols > 128:
print "Max 128 columns"
sys.exit(1)
for i in range(cols):
for byteoff in range(8):
bits = []
for bitoff in range(8):
row = lines[byteoff * 8 + bitoff]
bits.append(row[i])
print "write_data", " ".join(reversed(bits))
print ""
|
Add script to generate script from pbm filesimport sys
import re
with open(sys.argv[1]) as f:
magic = f.readline().strip()
if magic != "P1":
print "Expected file to start with P1"
sys.exit(1)
f.readline() # Ignore comment line
w, h = [int(n) for n in f.readline().strip().split(" ")]
pixels = re.sub(r"[^01]", "", f.read())
lines = [pixels[i:i+w] for i in xrange(0, len(pixels), w)]
if len(lines) > 64:
print "Max 64 lines"
sys.exit(1)
lines.extend(["0"*w] * (64 - len(lines))) # Pad out to 64 lines
cols = w
if cols > 128:
print "Max 128 columns"
sys.exit(1)
for i in range(cols):
for byteoff in range(8):
bits = []
for bitoff in range(8):
row = lines[byteoff * 8 + bitoff]
bits.append(row[i])
print "write_data", " ".join(reversed(bits))
print ""
|
<commit_before><commit_msg>Add script to generate script from pbm files<commit_after>import sys
import re
with open(sys.argv[1]) as f:
magic = f.readline().strip()
if magic != "P1":
print "Expected file to start with P1"
sys.exit(1)
f.readline() # Ignore comment line
w, h = [int(n) for n in f.readline().strip().split(" ")]
pixels = re.sub(r"[^01]", "", f.read())
lines = [pixels[i:i+w] for i in xrange(0, len(pixels), w)]
if len(lines) > 64:
print "Max 64 lines"
sys.exit(1)
lines.extend(["0"*w] * (64 - len(lines))) # Pad out to 64 lines
cols = w
if cols > 128:
print "Max 128 columns"
sys.exit(1)
for i in range(cols):
for byteoff in range(8):
bits = []
for bitoff in range(8):
row = lines[byteoff * 8 + bitoff]
bits.append(row[i])
print "write_data", " ".join(reversed(bits))
print ""
|
|
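Aside: the inner loop above emits each display byte as eight space-separated bits, bottom row first. The sketch below packs the same bits into an integer instead; the 8x8 test pattern is made up, and the "low bit = top row" ordering is inferred from the reversed() call in the script.

# hypothetical 8x8 bitmap: leftmost column fully set, everything else clear
rows = ["10000000"] * 8
col = 0
byte = 0
for bit in range(8):
    # same ordering as the script: the top row ends up as the low bit
    byte |= int(rows[bit][col]) << bit
print("write_data 0x%02X" % byte)  # -> write_data 0xFF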
5de36454db293c03039bec3c8c628995f07825b0
|
src/txkube/_compat.py
|
src/txkube/_compat.py
|
# Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
Helpers for Python 2/3 compatibility.
"""
from json import dumps
from twisted.python.compat import unicode
def dumps_bytes(obj):
"""
Serialize ``obj`` to JSON formatted ``bytes``.
"""
b = dumps(obj)
if isinstance(b, unicode):
b = b.encode("ascii")
return b
|
Add a dumps_bytes() helper method to handle Python 2/3 compatibility.
|
Add a dumps_bytes() helper method to handle Python 2/3 compatibility.
|
Python
|
mit
|
LeastAuthority/txkube
|
Add a dumps_bytes() helper method to handle Python 2/3 compatibility.
|
# Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
Helpers for Python 2/3 compatibility.
"""
from json import dumps
from twisted.python.compat import unicode
def dumps_bytes(obj):
"""
Serialize ``obj`` to JSON formatted ``bytes``.
"""
b = dumps(obj)
if isinstance(b, unicode):
b = b.encode("ascii")
return b
|
<commit_before><commit_msg>Add a dumps_bytes() helper method to handle Python 2/3 compatibility.<commit_after>
|
# Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
Helpers for Python 2/3 compatibility.
"""
from json import dumps
from twisted.python.compat import unicode
def dumps_bytes(obj):
"""
Serialize ``obj`` to JSON formatted ``bytes``.
"""
b = dumps(obj)
if isinstance(b, unicode):
b = b.encode("ascii")
return b
|
Add a dumps_bytes() helper method to handle Python 2/3 compatibility.# Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
Helpers for Python 2/3 compatibility.
"""
from json import dumps
from twisted.python.compat import unicode
def dumps_bytes(obj):
"""
Serialize ``obj`` to JSON formatted ``bytes``.
"""
b = dumps(obj)
if isinstance(b, unicode):
b = b.encode("ascii")
return b
|
<commit_before><commit_msg>Add a dumps_bytes() helper method to handle Python 2/3 compatibility.<commit_after># Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
Helpers for Python 2/3 compatibility.
"""
from json import dumps
from twisted.python.compat import unicode
def dumps_bytes(obj):
"""
Serialize ``obj`` to JSON formatted ``bytes``.
"""
b = dumps(obj)
if isinstance(b, unicode):
b = b.encode("ascii")
return b
|
|
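Aside: on Python 3 alone, json.dumps always returns str, so the unicode check above becomes unnecessary. A Python-3-only equivalent (a sketch, not part of txkube) can encode unconditionally; the ASCII encode is safe because dumps escapes non-ASCII characters by default (ensure_ascii=True).

from json import dumps

def dumps_bytes_py3(obj):
    """Serialize ``obj`` to JSON formatted ``bytes`` (Python 3 only)."""
    return dumps(obj).encode("ascii")

print(dumps_bytes_py3({"kind": "Pod"}))  # b'{"kind": "Pod"}'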
d7f184dd7c41bb3cacba5f77c81ae961b3a12760
|
subsample_bam_file.py
|
subsample_bam_file.py
|
#!/usr/bin/env python
"""
This script subsamples the alignments of a BAM file. For this a
likelihood (0.0 < p(keep) < 1.0) of keeping all alignments of a read
has to be provided. All alignments of a read are treated the same
(i.e. are discarded or kept).
"""
import argparse
import random
import sys
import pysam
__description__ = "Subsample BAM file entries"
__author__ = "Konrad Foerstner <konrad@foerstner.org>"
__copyright__ = "2013 by Konrad Foerstner <konrad@foerstner.org>"
__license__ = "ISC license"
__email__ = "konrad@foerstner.org"
__version__ = "0.1"
parser = argparse.ArgumentParser()
parser.add_argument("input_bam")
parser.add_argument("output_bam")
parser.add_argument("keeping_likelihood", type=float)
args = parser.parse_args()
input_bam = pysam.Samfile(args.input_bam, "rb")
output_bam = pysam.Samfile(
args.output_bam, "wb", referencenames=input_bam.references,
referencelengths=input_bam.lengths, header=input_bam.header,
text=input_bam.text)
prev_query = None
prev_keep = None
for alignment in input_bam:
# This is for reads that have multiple alignments. If the previous
# alignment comes from the same read treat the current one the
# same way (keep or discard).
if alignment.qname == prev_query:
if prev_keep is True:
output_bam.write(alignment)
continue
else:
continue
if random.random() <= args.keeping_likelihood:
output_bam.write(alignment)
prev_keep = True
else:
prev_keep = False
prev_query = alignment.qname
|
Add script to subsample bam file entries
|
Add script to subsample bam file entries
|
Python
|
isc
|
konrad/kuf_bio_scripts
|
Add script to subsample bam file entries
|
#!/usr/bin/env python
"""
This script subsamples the alignments of a BAM file. For this a
likelihood (0.0 < p(keep) < 1.0) of keeping all alignments of a read
has to be provided. All alignments of a read are treated the same
(i.e. are discarded or kept).
"""
import argparse
import random
import sys
import pysam
__description__ = "Subsample BAM file entries"
__author__ = "Konrad Foerstner <konrad@foerstner.org>"
__copyright__ = "2013 by Konrad Foerstner <konrad@foerstner.org>"
__license__ = "ISC license"
__email__ = "konrad@foerstner.org"
__version__ = "0.1"
parser = argparse.ArgumentParser()
parser.add_argument("input_bam")
parser.add_argument("output_bam")
parser.add_argument("keeping_likelihood", type=float)
args = parser.parse_args()
input_bam = pysam.Samfile(args.input_bam, "rb")
output_bam = pysam.Samfile(
args.output_bam, "wb", referencenames=input_bam.references,
referencelengths=input_bam.lengths, header=input_bam.header,
text=input_bam.text)
prev_query = None
prev_keep = None
for alignment in input_bam:
# This is for reads that have multiple alignments. If the previous
# alignment comes from the same read treat the current one the
# same way (keep or discard).
if alignment.qname == prev_query:
if prev_keep is True:
output_bam.write(alignment)
continue
else:
continue
if random.random() <= args.keeping_likelihood:
output_bam.write(alignment)
prev_keep = True
else:
prev_keep = False
prev_query = alignment.qname
|
<commit_before><commit_msg>Add script to subsample bam file entries<commit_after>
|
#!/usr/bin/env python
"""
This script subsamples the alignments of a BAM file. For this a
likelihood (0.0 < p(keep) < 1.0) of keeping all alignments of a read
has to be provided. All alignments of a read are treated the same
(i.e. are discarded or kept).
"""
import argparse
import random
import sys
import pysam
__description__ = "Subsample BAM file entries"
__author__ = "Konrad Foerstner <konrad@foerstner.org>"
__copyright__ = "2013 by Konrad Foerstner <konrad@foerstner.org>"
__license__ = "ISC license"
__email__ = "konrad@foerstner.org"
__version__ = "0.1"
parser = argparse.ArgumentParser()
parser.add_argument("input_bam")
parser.add_argument("output_bam")
parser.add_argument("keeping_likelihood", type=float)
args = parser.parse_args()
input_bam = pysam.Samfile(args.input_bam, "rb")
output_bam = pysam.Samfile(
args.output_bam, "wb", referencenames=input_bam.references,
referencelengths=input_bam.lengths, header=input_bam.header,
text=input_bam.text)
prev_query = None
prev_keep = None
for alignment in input_bam:
# This is for reads that have multiple alignments. If the previous
# alignment comes from the same read treat the current one the
# same way (keep or discard).
if alignment.qname == prev_query:
if prev_keep is True:
output_bam.write(alignment)
continue
else:
continue
if random.random() <= args.keeping_likelihood:
output_bam.write(alignment)
prev_keep = True
else:
prev_keep = False
prev_query = alignment.qname
|
Add script to subsample bam file entries#!/usr/bin/env python
"""
This script subsamples the alignments of a BAM file. For this a
likelihood (0.0 < p(keep) < 1.0) of keeping all alignments of a read
has to be provided. All alignments of a read are treated the same
(i.e. are discarded or kept).
"""
import argparse
import random
import sys
import pysam
__description__ = "Subsample BAM file entries"
__author__ = "Konrad Foerstner <konrad@foerstner.org>"
__copyright__ = "2013 by Konrad Foerstner <konrad@foerstner.org>"
__license__ = "ISC license"
__email__ = "konrad@foerstner.org"
__version__ = "0.1"
parser = argparse.ArgumentParser()
parser.add_argument("input_bam")
parser.add_argument("output_bam")
parser.add_argument("keeping_likelihood", type=float)
args = parser.parse_args()
input_bam = pysam.Samfile(args.input_bam, "rb")
output_bam = pysam.Samfile(
args.output_bam, "wb", referencenames=input_bam.references,
referencelengths=input_bam.lengths, header=input_bam.header,
text=input_bam.text)
prev_query = None
prev_keep = None
for alignment in input_bam:
# This is for reads that have multiple alignments. If the previous
# alignment comes from the same read treat the current one the
# same way (keep or discard).
if alignment.qname == prev_query:
if prev_keep is True:
output_bam.write(alignment)
continue
else:
continue
if random.random() <= args.keeping_likelihood:
output_bam.write(alignment)
prev_keep = True
else:
prev_keep = False
prev_query = alignment.qname
|
<commit_before><commit_msg>Add script to subsample bam file entries<commit_after>#!/usr/bin/env python
"""
This script subsamples the alignments of a BAM file. For this a
likelihood (0.0 < p(keep) < 1.0) of keeping all alignments of a read
has to be provided. All alignments of a read are treated the same
(i.e. are discarded or kept).
"""
import argparse
import random
import sys
import pysam
__description__ = "Subsample BAM file entries"
__author__ = "Konrad Foerstner <konrad@foerstner.org>"
__copyright__ = "2013 by Konrad Foerstner <konrad@foerstner.org>"
__license__ = "ISC license"
__email__ = "konrad@foerstner.org"
__version__ = "0.1"
parser = argparse.ArgumentParser()
parser.add_argument("input_bam")
parser.add_argument("output_bam")
parser.add_argument("keeping_likelihood", type=float)
args = parser.parse_args()
input_bam = pysam.Samfile(args.input_bam, "rb")
output_bam = pysam.Samfile(
args.output_bam, "wb", referencenames=input_bam.references,
referencelengths=input_bam.lengths, header=input_bam.header,
text=input_bam.text)
prev_query = None
prev_keep = None
for alignment in input_bam:
# This is for reads that have multiple alignments. If the previous
# alignment comes from the same read treat the current one the
# same way (keep or discard).
if alignment.qname == prev_query:
if prev_keep is True:
output_bam.write(alignment)
continue
else:
continue
if random.random() <= args.keeping_likelihood:
output_bam.write(alignment)
prev_keep = True
else:
prev_keep = False
prev_query = alignment.qname
|
|
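Aside: the core idea of the script above — decide once per read name, then reuse the decision for its consecutive alignments — can be sketched without pysam on plain tuples. The input pairs below are made up; only the grouping logic is taken from the script.

import random

alignments = [("r1", "a"), ("r1", "b"), ("r2", "c"), ("r3", "d"), ("r3", "e")]
keep_p = 0.5
prev_name = None
prev_keep = None
kept = []
for name, payload in alignments:
    if name != prev_name:
        # decide once per read; consecutive alignments inherit the decision
        prev_keep = random.random() <= keep_p
        prev_name = name
    if prev_keep:
        kept.append((name, payload))
print(kept)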
67120e72883d4a5fd86dca2fba26599e65e7ea39
|
every_election/apps/elections/management/commands/add_referendum.py
|
every_election/apps/elections/management/commands/add_referendum.py
|
from django.core.management import BaseCommand
from elections.models import (
Election,
ElectionType,
ModerationHistory,
ModerationStatuses,
)
from organisations.models.organisations import Organisation
class Command(BaseCommand):
help = """
Adds an election with an election type of referendum
Example usage:
python manage.py add_referendum --date 2021-10-07 \
--council croydon \
--election-title "Governance referendum" \
--official-identifier CRY \
--org-type local-authority
"""
def add_arguments(self, parser):
parser.add_argument(
"-d",
"--date",
action="store",
dest="date",
help="The date the referendum is taking place",
required=True,
)
parser.add_argument(
"-c",
"--council",
action="store",
dest="council",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"-t",
"--election-title",
action="store",
dest="election_title",
help="The election title to be used",
required=True,
)
parser.add_argument(
"--official-identifier",
action="store",
dest="official_identifier",
help="The official identifier for the related Organisation. Election will cover the whole organisation unless a division ID is passed",
required=True,
)
parser.add_argument(
"--org-type",
action="store",
dest="org_type",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"--division-id",
action="store",
dest="division_id",
help="ID of a OrganisationDivision if applicable",
required=False,
)
def handle(self, *args, **options):
election_date = options["date"]
election_id = f"ref.{options['council']}.{election_date}"
referendum_type = ElectionType.objects.get(election_type="ref")
ref_election, created = Election.private_objects.update_or_create(
election_id=election_id,
defaults={
"election_type": referendum_type,
"poll_open_date": election_date,
"election_title": options["election_title"],
"current": True,
"division_id": options["division_id"],
},
)
self.stdout.write(f"{'Created' if created else 'Updated'} {election_id}")
ModerationHistory.objects.get_or_create(
status_id=ModerationStatuses.approved.value,
election=ref_election,
)
org = Organisation.objects.get_by_date(
date=election_date,
official_identifier=options["official_identifier"],
organisation_type=options["org_type"],
)
ref_election.organisation = org
ref_election.organisation_geography = org.geographies.latest()
if ref_election.division:
ref_election.division_geography = ref_election.division.geography
ref_election.save()
|
Add management command to add a referendum election
|
Add management command to add a referendum election
- Added to allow us to add Croydon referendum initially, with
the possibility of being able to use again in the future as required
|
Python
|
bsd-3-clause
|
DemocracyClub/EveryElection,DemocracyClub/EveryElection,DemocracyClub/EveryElection
|
Add management command to add a referendum election
- Added to allow us to add Croydon referendum initially, with
the possibility of being able to use again in the future as required
|
from django.core.management import BaseCommand
from elections.models import (
Election,
ElectionType,
ModerationHistory,
ModerationStatuses,
)
from organisations.models.organisations import Organisation
class Command(BaseCommand):
help = """
Adds an election with an election type of referendum
Example usage:
python manage.py add_referendum --date 2021-10-07 \
--council croydon \
--election-title "Governance referendum" \
--official-identifier CRY \
--org-type local-authority
"""
def add_arguments(self, parser):
parser.add_argument(
"-d",
"--date",
action="store",
dest="date",
help="The date the referendum is taking place",
required=True,
)
parser.add_argument(
"-c",
"--council",
action="store",
dest="council",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"-t",
"--election-title",
action="store",
dest="election_title",
help="The election title to be used",
required=True,
)
parser.add_argument(
"--official-identifier",
action="store",
dest="official_identifier",
help="The official identifier for the related Organisation. Election will cover the whole organisation unless a division ID is passed",
required=True,
)
parser.add_argument(
"--org-type",
action="store",
dest="org_type",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"--division-id",
action="store",
dest="division_id",
help="ID of a OrganisationDivision if applicable",
required=False,
)
def handle(self, *args, **options):
election_date = options["date"]
election_id = f"ref.{options['council']}.{election_date}"
referendum_type = ElectionType.objects.get(election_type="ref")
ref_election, created = Election.private_objects.update_or_create(
election_id=election_id,
defaults={
"election_type": referendum_type,
"poll_open_date": election_date,
"election_title": options["election_title"],
"current": True,
"division_id": options["division_id"],
},
)
self.stdout.write(f"{'Created' if created else 'Updated'} {election_id}")
ModerationHistory.objects.get_or_create(
status_id=ModerationStatuses.approved.value,
election=ref_election,
)
org = Organisation.objects.get_by_date(
date=election_date,
official_identifier=options["official_identifier"],
organisation_type=options["org_type"],
)
ref_election.organisation = org
ref_election.organisation_geography = org.geographies.latest()
if ref_election.division:
ref_election.division_geography = ref_election.division.geography
ref_election.save()
|
<commit_before><commit_msg>Add management command to add a referendum election
- Added to allow us to add Croydon referendum initially, with
the possibility of being able to use again in the future as required<commit_after>
|
from django.core.management import BaseCommand
from elections.models import (
Election,
ElectionType,
ModerationHistory,
ModerationStatuses,
)
from organisations.models.organisations import Organisation
class Command(BaseCommand):
help = """
Adds an election with an election type of referendum
Example usage:
python manage.py add_referendum --date 2021-10-07 \
--council croydon \
--election-title "Governance referendum" \
--official-identifier CRY \
--org-type local-authority
"""
def add_arguments(self, parser):
parser.add_argument(
"-d",
"--date",
action="store",
dest="date",
help="The date the referendum is taking place",
required=True,
)
parser.add_argument(
"-c",
"--council",
action="store",
dest="council",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"-t",
"--election-title",
action="store",
dest="election_title",
help="The election title to be used",
required=True,
)
parser.add_argument(
"--official-identifier",
action="store",
dest="official_identifier",
help="The official identifier for the related Organisation. Election will cover the whole organisation unless a division ID is passed",
required=True,
)
parser.add_argument(
"--org-type",
action="store",
dest="org_type",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"--division-id",
action="store",
dest="division_id",
help="ID of a OrganisationDivision if applicable",
required=False,
)
def handle(self, *args, **options):
election_date = options["date"]
election_id = f"ref.{options['council']}.{election_date}"
referendum_type = ElectionType.objects.get(election_type="ref")
ref_election, created = Election.private_objects.update_or_create(
election_id=election_id,
defaults={
"election_type": referendum_type,
"poll_open_date": election_date,
"election_title": options["election_title"],
"current": True,
"division_id": options["division_id"],
},
)
self.stdout.write(f"{'Created' if created else 'Updated'} {election_id}")
ModerationHistory.objects.get_or_create(
status_id=ModerationStatuses.approved.value,
election=ref_election,
)
org = Organisation.objects.get_by_date(
date=election_date,
official_identifier=options["official_identifier"],
organisation_type=options["org_type"],
)
ref_election.organisation = org
ref_election.organisation_geography = org.geographies.latest()
if ref_election.division:
ref_election.division_geography = ref_election.division.geography
ref_election.save()
|
Add management command to add a referendum election
- Added to allow us to add Croydon referendum initially, with
the possibility of being able to use again in the future as requiredfrom django.core.management import BaseCommand
from elections.models import (
Election,
ElectionType,
ModerationHistory,
ModerationStatuses,
)
from organisations.models.organisations import Organisation
class Command(BaseCommand):
help = """
Adds an election with an election type of referendum
Example usage:
python manage.py add_referendum --date 2021-10-07 \
--council croydon \
--election-title "Governance referendum" \
--official-identifier CRY \
--org-type local-authority
"""
def add_arguments(self, parser):
parser.add_argument(
"-d",
"--date",
action="store",
dest="date",
help="The date the referendum is taking place",
required=True,
)
parser.add_argument(
"-c",
"--council",
action="store",
dest="council",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"-t",
"--election-title",
action="store",
dest="election_title",
help="The election title to be used",
required=True,
)
parser.add_argument(
"--official-identifier",
action="store",
dest="official_identifier",
help="The official identifier for the related Organisation. Election will cover the whole organisation unless a division ID is passed",
required=True,
)
parser.add_argument(
"--org-type",
action="store",
dest="org_type",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"--division-id",
action="store",
dest="division_id",
help="ID of a OrganisationDivision if applicable",
required=False,
)
def handle(self, *args, **options):
election_date = options["date"]
election_id = f"ref.{options['council']}.{election_date}"
referendum_type = ElectionType.objects.get(election_type="ref")
ref_election, created = Election.private_objects.update_or_create(
election_id=election_id,
defaults={
"election_type": referendum_type,
"poll_open_date": election_date,
"election_title": options["election_title"],
"current": True,
"division_id": options["division_id"],
},
)
self.stdout.write(f"{'Created' if created else 'Updated'} {election_id}")
ModerationHistory.objects.get_or_create(
status_id=ModerationStatuses.approved.value,
election=ref_election,
)
org = Organisation.objects.get_by_date(
date=election_date,
official_identifier=options["official_identifier"],
organisation_type=options["org_type"],
)
ref_election.organisation = org
ref_election.organisation_geography = org.geographies.latest()
if ref_election.division:
ref_election.division_geography = ref_election.division.geography
ref_election.save()
|
<commit_before><commit_msg>Add management command to add a referendum election
- Added to allow us to add Croydon referendum initially, with
the possibility of being able to use again in the future as required<commit_after>from django.core.management import BaseCommand
from elections.models import (
Election,
ElectionType,
ModerationHistory,
ModerationStatuses,
)
from organisations.models.organisations import Organisation
class Command(BaseCommand):
help = """
Adds an election with an election type of referendum
Example usage:
python manage.py add_referendum --date 2021-10-07 \
--council croydon \
--election-title "Governance referendum" \
--official-identifier CRY \
--org-type local-authority
"""
def add_arguments(self, parser):
parser.add_argument(
"-d",
"--date",
action="store",
dest="date",
help="The date the referendum is taking place",
required=True,
)
parser.add_argument(
"-c",
"--council",
action="store",
dest="council",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"-t",
"--election-title",
action="store",
dest="election_title",
help="The election title to be used",
required=True,
)
parser.add_argument(
"--official-identifier",
action="store",
dest="official_identifier",
help="The official identifier for the related Organisation. Election will cover the whole organisation unless a division ID is passed",
required=True,
)
parser.add_argument(
"--org-type",
action="store",
dest="org_type",
help="The council area the referendum is taking place",
required=True,
)
parser.add_argument(
"--division-id",
action="store",
dest="division_id",
help="ID of a OrganisationDivision if applicable",
required=False,
)
def handle(self, *args, **options):
election_date = options["date"]
election_id = f"ref.{options['council']}.{election_date}"
referendum_type = ElectionType.objects.get(election_type="ref")
ref_election, created = Election.private_objects.update_or_create(
election_id=election_id,
defaults={
"election_type": referendum_type,
"poll_open_date": election_date,
"election_title": options["election_title"],
"current": True,
"division_id": options["division_id"],
},
)
self.stdout.write(f"{'Created' if created else 'Updated'} {election_id}")
ModerationHistory.objects.get_or_create(
status_id=ModerationStatuses.approved.value,
election=ref_election,
)
org = Organisation.objects.get_by_date(
date=election_date,
official_identifier=options["official_identifier"],
organisation_type=options["org_type"],
)
ref_election.organisation = org
ref_election.organisation_geography = org.geographies.latest()
if ref_election.division:
ref_election.division_geography = ref_election.division.geography
ref_election.save()
|
|
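Aside: the command above can also be driven from code (for example in a test) with Django's call_command, passing options by their dest names. The sketch below simply replays the command's own usage example; nothing here goes beyond values already present in the record.

from django.core.management import call_command

call_command(
    "add_referendum",
    date="2021-10-07",
    council="croydon",
    election_title="Governance referendum",
    official_identifier="CRY",
    org_type="local-authority",
)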
c008d60fda4f3e3d58d123ab4a972016e669a7dc
|
tests/test_summary_downloader.py
|
tests/test_summary_downloader.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import itertools
import tempfile
from zipfile import ZipFile
from utils.summary_downloader import SummaryDownloader
def test_download_unzipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date, zip_summaries=False)
sdl.run()
tgt_dir = sdl.get_tgt_dir()
assert sorted(os.listdir(tgt_dir)) == sorted(files)
def test_download_zipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date)
sdl.run()
zip_path = sdl.get_zip_path()
zip = ZipFile(zip_path)
assert sorted(zip.namelist()) == sorted(files)
|
Add tests for download of game summaries
|
Add tests for download of game summaries
|
Python
|
mit
|
leaffan/pynhldb
|
Add tests for download of game summaries
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import itertools
import tempfile
from zipfile import ZipFile
from utils.summary_downloader import SummaryDownloader
def test_download_unzipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date, zip_summaries=False)
sdl.run()
tgt_dir = sdl.get_tgt_dir()
assert sorted(os.listdir(tgt_dir)) == sorted(files)
def test_download_zipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date)
sdl.run()
zip_path = sdl.get_zip_path()
zip = ZipFile(zip_path)
assert sorted(zip.namelist()) == sorted(files)
|
<commit_before><commit_msg>Add tests for download of game summaries<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import itertools
import tempfile
from zipfile import ZipFile
from utils.summary_downloader import SummaryDownloader
def test_download_unzipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date, zip_summaries=False)
sdl.run()
tgt_dir = sdl.get_tgt_dir()
assert sorted(os.listdir(tgt_dir)) == sorted(files)
def test_download_zipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date)
sdl.run()
zip_path = sdl.get_zip_path()
zip = ZipFile(zip_path)
assert sorted(zip.namelist()) == sorted(files)
|
Add tests for download of game summaries#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import itertools
import tempfile
from zipfile import ZipFile
from utils.summary_downloader import SummaryDownloader
def test_download_unzipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date, zip_summaries=False)
sdl.run()
tgt_dir = sdl.get_tgt_dir()
assert sorted(os.listdir(tgt_dir)) == sorted(files)
def test_download_zipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date)
sdl.run()
zip_path = sdl.get_zip_path()
zip = ZipFile(zip_path)
assert sorted(zip.namelist()) == sorted(files)
|
<commit_before><commit_msg>Add tests for download of game summaries<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import itertools
import tempfile
from zipfile import ZipFile
from utils.summary_downloader import SummaryDownloader
def test_download_unzipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date, zip_summaries=False)
sdl.run()
tgt_dir = sdl.get_tgt_dir()
assert sorted(os.listdir(tgt_dir)) == sorted(files)
def test_download_zipped():
date = "Oct 24, 2016"
tgt_dir = tempfile.mkdtemp(prefix='sdl_test_')
prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
# setting up list of all HTML report files that should be downloaded for
# specified date
files = ["".join(c) + ".HTM" for c in list(
itertools.product(prefixes, game_ids))]
# adding JSON game report files
files.extend(["".join((gid, ".json")) for gid in game_ids])
# adding shootout report for one of the games
files.append("SO020082.HTM")
sdl = SummaryDownloader(tgt_dir, date)
sdl.run()
zip_path = sdl.get_zip_path()
zip = ZipFile(zip_path)
assert sorted(zip.namelist()) == sorted(files)
|
|
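Aside: both tests above build the same expected-file list. Isolated, the construction looks like this; the prefixes and game ids are copied from the tests, and the 19-file total is plain arithmetic (8 prefixes x 2 games + 2 JSON files + 1 shootout report).

import itertools

prefixes = ["ES", "FC", "GS", "PL", "RO", "SS", "TH", "TV"]
game_ids = ["020081", "020082"]
files = ["".join(c) + ".HTM" for c in itertools.product(prefixes, game_ids)]
files.extend(gid + ".json" for gid in game_ids)   # JSON game reports
files.append("SO020082.HTM")                      # shootout report, one game only
assert len(files) == 19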
02a8a627bfffb1b0cda4455a2c639e92ef1d7a46
|
lib/portbuild/qthreads.py
|
lib/portbuild/qthreads.py
|
#! /usr/bin/env python
import threading
import time
from pika.adapters import BlockingConnection
class QueueThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.name = self.__class__.__name__
self.connection = BlockingConnection()
self._stop = threading.Event()
self.setDaemon(True)
def dprint(self, msg):
print "%s: %s" % (self.name, msg)
class QueueConsumerThread(QueueThread):
def __init__(self):
QueueThread.__init__(self)
def run(self):
self.setup()
self.channel.start_consuming()
def stop(self):
self.channel.stop_consuming()
self._stop.set()
class QueueProducerThread(QueueThread):
def __init__(self, freq=5):
QueueThread.__init__(self)
self.freq = freq
def run(self):
self.setup()
# Need to find something better to avoid active loops.
# Using signal.pause() when produce() is a no-op doesn't work.
while True:
self.produce()
time.sleep(self.freq)
def stop(self):
self.connection.close()
self._stop.set()
# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab
|
Add queue thread convenience classes.
|
Add queue thread convenience classes.
|
Python
|
bsd-2-clause
|
flz/portbuild-ng
|
Add queue thread convenience classes.
|
#! /usr/bin/env python
import threading
import time
from pika.adapters import BlockingConnection
class QueueThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.name = self.__class__.__name__
self.connection = BlockingConnection()
self._stop = threading.Event()
self.setDaemon(True)
def dprint(self, msg):
print "%s: %s" % (self.name, msg)
class QueueConsumerThread(QueueThread):
def __init__(self):
QueueThread.__init__(self)
def run(self):
self.setup()
self.channel.start_consuming()
def stop(self):
self.channel.stop_consuming()
self._stop.set()
class QueueProducerThread(QueueThread):
def __init__(self, freq=5):
QueueThread.__init__(self)
self.freq = freq
def run(self):
self.setup()
# Need to find something better to avoid active loops.
# Using signal.pause() when produce() is a no-op doesn't work.
while True:
self.produce()
time.sleep(self.freq)
def stop(self):
self.connection.close()
self._stop.set()
# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab
|
<commit_before><commit_msg>Add queue thread convenience classes.<commit_after>
|
#! /usr/bin/env python
import threading
import time
from pika.adapters import BlockingConnection
class QueueThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.name = self.__class__.__name__
self.connection = BlockingConnection()
self._stop = threading.Event()
self.setDaemon(True)
def dprint(self, msg):
print "%s: %s" % (self.name, msg)
class QueueConsumerThread(QueueThread):
def __init__(self):
QueueThread.__init__(self)
def run(self):
self.setup()
self.channel.start_consuming()
def stop(self):
self.channel.stop_consuming()
self._stop.set()
class QueueProducerThread(QueueThread):
def __init__(self, freq=5):
QueueThread.__init__(self)
self.freq = freq
def run(self):
self.setup()
# Need to find something better to avoid active loops.
# Using signal.pause() when produce() is a no-op doesn't work.
while True:
self.produce()
time.sleep(self.freq)
def stop(self):
self.connection.close()
self._stop.set()
# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab
|
Add queue thread convenience classes.#! /usr/bin/env python
import threading
import time
from pika.adapters import BlockingConnection
class QueueThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.name = self.__class__.__name__
self.connection = BlockingConnection()
self._stop = threading.Event()
self.setDaemon(True)
def dprint(self, msg):
print "%s: %s" % (self.name, msg)
class QueueConsumerThread(QueueThread):
def __init__(self):
QueueThread.__init__(self)
def run(self):
self.setup()
self.channel.start_consuming()
def stop(self):
self.channel.stop_consuming()
self._stop.set()
class QueueProducerThread(QueueThread):
def __init__(self, freq=5):
QueueThread.__init__(self)
self.freq = freq
def run(self):
self.setup()
# Need to find something better to avoid active loops.
# Using signal.pause() when produce() is a no-op doesn't work.
while True:
self.produce()
time.sleep(self.freq)
def stop(self):
self.connection.close()
self._stop.set()
# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab
|
<commit_before><commit_msg>Add queue thread convenience classes.<commit_after>#! /usr/bin/env python
import threading
import time
from pika.adapters import BlockingConnection
class QueueThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.name = self.__class__.__name__
self.connection = BlockingConnection()
self._stop = threading.Event()
self.setDaemon(True)
def dprint(self, msg):
print "%s: %s" % (self.name, msg)
class QueueConsumerThread(QueueThread):
def __init__(self):
QueueThread.__init__(self)
def run(self):
self.setup()
self.channel.start_consuming()
def stop(self):
self.channel.stop_consuming()
self._stop.set()
class QueueProducerThread(QueueThread):
def __init__(self, freq=5):
QueueThread.__init__(self)
self.freq = freq
def run(self):
self.setup()
# Need to find something better to avoid active loops.
# Using signal.pause() when produce() is a no-op doesn't work.
while True:
self.produce()
time.sleep(self.freq)
def stop(self):
self.connection.close()
self._stop.set()
# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab
|
|
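Aside: the base classes above leave setup() entirely to subclasses. A hedged sketch of a concrete consumer follows — the queue name, the callback body, and the pika 0.x-style basic_consume signature are all assumptions, not code from the record.

class EchoConsumer(QueueConsumerThread):
    def setup(self):
        # open a channel on the BlockingConnection created by QueueThread
        self.channel = self.connection.channel()
        self.channel.queue_declare(queue="echo")
        self.channel.basic_consume(self.on_message, queue="echo")

    def on_message(self, channel, method, properties, body):
        self.dprint(body)
        channel.basic_ack(delivery_tag=method.delivery_tag)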
71b308f123aff1b4498fe14188380a6352d34d49
|
numba/tests/builtins/test_builtin_pow.py
|
numba/tests/builtins/test_builtin_pow.py
|
# adapted from cython/tests/run/builtin_pow.pyx
"""
>>> pow3(2,3,5)
3
>>> pow3(3,3,5)
2
>>> pow3_const()
3
>>> pow2(2,3)
8
>>> pow2(3,3)
27
>>> pow2_const()
8
"""
from numba import autojit
@autojit(backend='ast')
def pow3(a,b,c):
return pow(a,b,c)
@autojit(backend='ast')
def pow3_const():
return pow(2,3,5)
@autojit(backend='ast')
def pow2(a,b):
return pow(a,b)
@autojit(backend='ast')
def pow2_const():
return pow(2,3)
if __name__ == '__main__':
import doctest
doctest.testmod()
|
Add testcase for pow builtin
|
Add testcase for pow builtin
|
Python
|
bsd-2-clause
|
stefanseefeld/numba,GaZ3ll3/numba,sklam/numba,gdementen/numba,numba/numba,gmarkall/numba,cpcloud/numba,gmarkall/numba,pitrou/numba,jriehl/numba,sklam/numba,pombredanne/numba,stonebig/numba,pitrou/numba,gdementen/numba,shiquanwang/numba,cpcloud/numba,stefanseefeld/numba,stefanseefeld/numba,pitrou/numba,stuartarchibald/numba,gdementen/numba,gdementen/numba,seibert/numba,seibert/numba,GaZ3ll3/numba,stonebig/numba,stuartarchibald/numba,seibert/numba,stuartarchibald/numba,sklam/numba,pitrou/numba,stonebig/numba,cpcloud/numba,numba/numba,seibert/numba,IntelLabs/numba,stonebig/numba,jriehl/numba,jriehl/numba,pombredanne/numba,gmarkall/numba,sklam/numba,IntelLabs/numba,GaZ3ll3/numba,gdementen/numba,sklam/numba,cpcloud/numba,IntelLabs/numba,seibert/numba,gmarkall/numba,gmarkall/numba,ssarangi/numba,stefanseefeld/numba,GaZ3ll3/numba,shiquanwang/numba,numba/numba,pombredanne/numba,pombredanne/numba,shiquanwang/numba,stefanseefeld/numba,numba/numba,ssarangi/numba,cpcloud/numba,pombredanne/numba,ssarangi/numba,IntelLabs/numba,ssarangi/numba,jriehl/numba,IntelLabs/numba,numba/numba,ssarangi/numba,jriehl/numba,stuartarchibald/numba,pitrou/numba,GaZ3ll3/numba,stuartarchibald/numba,stonebig/numba
|
Add testcase for pow builtin
|
# adapted from cython/tests/run/builtin_pow.pyx
"""
>>> pow3(2,3,5)
3
>>> pow3(3,3,5)
2
>>> pow3_const()
3
>>> pow2(2,3)
8
>>> pow2(3,3)
27
>>> pow2_const()
8
"""
from numba import autojit
@autojit(backend='ast')
def pow3(a,b,c):
return pow(a,b,c)
@autojit(backend='ast')
def pow3_const():
return pow(2,3,5)
@autojit(backend='ast')
def pow2(a,b):
return pow(a,b)
@autojit(backend='ast')
def pow2_const():
return pow(2,3)
if __name__ == '__main__':
import doctest
doctest.testmod()
|
<commit_before><commit_msg>Add testcase for pow builtin<commit_after>
|
# adapted from cython/tests/run/builtin_pow.pyx
"""
>>> pow3(2,3,5)
3
>>> pow3(3,3,5)
2
>>> pow3_const()
3
>>> pow2(2,3)
8
>>> pow2(3,3)
27
>>> pow2_const()
8
"""
from numba import autojit
@autojit(backend='ast')
def pow3(a,b,c):
return pow(a,b,c)
@autojit(backend='ast')
def pow3_const():
return pow(2,3,5)
@autojit(backend='ast')
def pow2(a,b):
return pow(a,b)
@autojit(backend='ast')
def pow2_const():
return pow(2,3)
if __name__ == '__main__':
import doctest
doctest.testmod()
|
Add testcase for pow builtin# adapted from cython/tests/run/builtin_pow.pyx
"""
>>> pow3(2,3,5)
3
>>> pow3(3,3,5)
2
>>> pow3_const()
3
>>> pow2(2,3)
8
>>> pow2(3,3)
27
>>> pow2_const()
8
"""
from numba import autojit
@autojit(backend='ast')
def pow3(a,b,c):
return pow(a,b,c)
@autojit(backend='ast')
def pow3_const():
return pow(2,3,5)
@autojit(backend='ast')
def pow2(a,b):
return pow(a,b)
@autojit(backend='ast')
def pow2_const():
return pow(2,3)
if __name__ == '__main__':
import doctest
doctest.testmod()
|
<commit_before><commit_msg>Add testcase for pow builtin<commit_after># adapted from cython/tests/run/builtin_pow.pyx
"""
>>> pow3(2,3,5)
3
>>> pow3(3,3,5)
2
>>> pow3_const()
3
>>> pow2(2,3)
8
>>> pow2(3,3)
27
>>> pow2_const()
8
"""
from numba import autojit
@autojit(backend='ast')
def pow3(a,b,c):
return pow(a,b,c)
@autojit(backend='ast')
def pow3_const():
return pow(2,3,5)
@autojit(backend='ast')
def pow2(a,b):
return pow(a,b)
@autojit(backend='ast')
def pow2_const():
return pow(2,3)
if __name__ == '__main__':
import doctest
doctest.testmod()
|
|
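Aside: the doctest expectations above follow directly from how pow behaves — the three-argument form is modular exponentiation. A plain-Python check (no numba required):

assert pow(2, 3, 5) == 3          # 2**3 = 8, and 8 mod 5 = 3
assert pow(3, 3, 5) == 2          # 3**3 = 27, and 27 mod 5 = 2
assert pow(2, 3) == 8 and pow(3, 3) == 27
print("pow expectations hold")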
cf5a5594ebecc03c1087f09575dba1a480b575f3
|
tests/test_authjob.py
|
tests/test_authjob.py
|
from disco.test import DiscoJobTestFixture, DiscoTestCase
from disco.ddfs import DDFS
from disco.util import ddfs_name
from cStringIO import StringIO
class AuthJobTestCase(DiscoJobTestFixture, DiscoTestCase):
input = []
@staticmethod
def map(e, params):
return [(e.strip(), '')]
@property
def answers(self):
return [('blobdata', '')]
def setUp(self):
tag = 'disco:test:authjob'
self.ddfs = DDFS(self.disco_master_url)
pushed = self.ddfs.push(tag, [(StringIO('blobdata'), 'blob')])
self.ddfs.setattr(tag, 'ddfs:read-token', 'r')
self.input = ['tag://u:r@/' + tag]
super(AuthJobTestCase, self).setUp()
def tearDown(self):
super(AuthJobTestCase, self).tearDown()
self.ddfs.delete('disco:test:authjob')
|
Add a test case for auth protected inputs.
|
Add a test case for auth protected inputs.
|
Python
|
bsd-3-clause
|
seabirdzh/disco,pombredanne/disco,ktkt2009/disco,ErikDubbelboer/disco,pooya/disco,pombredanne/disco,pooya/disco,oldmantaiter/disco,pooya/disco,seabirdzh/disco,beni55/disco,ktkt2009/disco,ErikDubbelboer/disco,oldmantaiter/disco,discoproject/disco,simudream/disco,mwilliams3/disco,pavlobaron/disco_playground,ktkt2009/disco,scrapinghub/disco,beni55/disco,pombredanne/disco,oldmantaiter/disco,discoproject/disco,ktkt2009/disco,mwilliams3/disco,simudream/disco,mwilliams3/disco,ErikDubbelboer/disco,pombredanne/disco,simudream/disco,pooya/disco,mwilliams3/disco,oldmantaiter/disco,seabirdzh/disco,ktkt2009/disco,seabirdzh/disco,pavlobaron/disco_playground,beni55/disco,mozilla/disco,ErikDubbelboer/disco,oldmantaiter/disco,discoproject/disco,mozilla/disco,pavlobaron/disco_playground,mwilliams3/disco,scrapinghub/disco,pavlobaron/disco_playground,discoproject/disco,beni55/disco,seabirdzh/disco,simudream/disco,discoproject/disco,simudream/disco,mozilla/disco,scrapinghub/disco,pombredanne/disco,scrapinghub/disco,mozilla/disco,beni55/disco,ErikDubbelboer/disco
|
Add a test case for auth protected inputs.
|
from disco.test import DiscoJobTestFixture, DiscoTestCase
from disco.ddfs import DDFS
from disco.util import ddfs_name
from cStringIO import StringIO
class AuthJobTestCase(DiscoJobTestFixture, DiscoTestCase):
input = []
@staticmethod
def map(e, params):
return [(e.strip(), '')]
@property
def answers(self):
return [('blobdata', '')]
def setUp(self):
tag = 'disco:test:authjob'
self.ddfs = DDFS(self.disco_master_url)
pushed = self.ddfs.push(tag, [(StringIO('blobdata'), 'blob')])
self.ddfs.setattr(tag, 'ddfs:read-token', 'r')
self.input = ['tag://u:r@/' + tag]
super(AuthJobTestCase, self).setUp()
def tearDown(self):
super(AuthJobTestCase, self).tearDown()
self.ddfs.delete('disco:test:authjob')
|
<commit_before><commit_msg>Add a test case for auth protected inputs.<commit_after>
|
from disco.test import DiscoJobTestFixture, DiscoTestCase
from disco.ddfs import DDFS
from disco.util import ddfs_name
from cStringIO import StringIO
class AuthJobTestCase(DiscoJobTestFixture, DiscoTestCase):
input = []
@staticmethod
def map(e, params):
return [(e.strip(), '')]
@property
def answers(self):
return [('blobdata', '')]
def setUp(self):
tag = 'disco:test:authjob'
self.ddfs = DDFS(self.disco_master_url)
pushed = self.ddfs.push(tag, [(StringIO('blobdata'), 'blob')])
self.ddfs.setattr(tag, 'ddfs:read-token', 'r')
self.input = ['tag://u:r@/' + tag]
super(AuthJobTestCase, self).setUp()
def tearDown(self):
super(AuthJobTestCase, self).tearDown()
self.ddfs.delete('disco:test:authjob')
|
Add a test case for auth protected inputs.from disco.test import DiscoJobTestFixture, DiscoTestCase
from disco.ddfs import DDFS
from disco.util import ddfs_name
from cStringIO import StringIO
class AuthJobTestCase(DiscoJobTestFixture, DiscoTestCase):
input = []
@staticmethod
def map(e, params):
return [(e.strip(), '')]
@property
def answers(self):
return [('blobdata', '')]
def setUp(self):
tag = 'disco:test:authjob'
self.ddfs = DDFS(self.disco_master_url)
pushed = self.ddfs.push(tag, [(StringIO('blobdata'), 'blob')])
self.ddfs.setattr(tag, 'ddfs:read-token', 'r')
self.input = ['tag://u:r@/' + tag]
super(AuthJobTestCase, self).setUp()
def tearDown(self):
super(AuthJobTestCase, self).tearDown()
self.ddfs.delete('disco:test:authjob')
|
<commit_before><commit_msg>Add a test case for auth protected inputs.<commit_after>from disco.test import DiscoJobTestFixture, DiscoTestCase
from disco.ddfs import DDFS
from disco.util import ddfs_name
from cStringIO import StringIO
class AuthJobTestCase(DiscoJobTestFixture, DiscoTestCase):
input = []
@staticmethod
def map(e, params):
return [(e.strip(), '')]
@property
def answers(self):
return [('blobdata', '')]
def setUp(self):
tag = 'disco:test:authjob'
self.ddfs = DDFS(self.disco_master_url)
pushed = self.ddfs.push(tag, [(StringIO('blobdata'), 'blob')])
self.ddfs.setattr(tag, 'ddfs:read-token', 'r')
self.input = ['tag://u:r@/' + tag]
super(AuthJobTestCase, self).setUp()
def tearDown(self):
super(AuthJobTestCase, self).tearDown()
self.ddfs.delete('disco:test:authjob')
|
|
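The interesting detail in this record is the input it builds: tag://u:r@/<tag>, i.e. the DDFS read token travels in the authority part of the tag URL. As a stand-alone illustration (Python 3 standard library only, no Disco required; the URL below is the one the test constructs), the credentials can be split back out like this:

from urllib.parse import urlsplit

def split_tag_auth(tag_url):
    # The netloc carries user:token; the path carries the tag name.
    parts = urlsplit(tag_url)
    return parts.username, parts.password, parts.path.lstrip('/')

print(split_tag_auth('tag://u:r@/disco:test:authjob'))
# -> ('u', 'r', 'disco:test:authjob')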
e6a57c43a693f71069778ddca3a28d5c73b75830
|
nova/scheduler/multi.py
|
nova/scheduler/multi.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Openstack, LLC.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler that allows routing some calls to one driver and others to another.
"""
from nova import flags
from nova import utils
from nova.scheduler import driver
FLAGS = flags.FLAGS
flags.DEFINE_string('compute_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling compute calls')
flags.DEFINE_string('volume_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling volume calls')
# A mapping of methods to topics so we can figure out which driver to use.
_METHOD_MAP = {'run_instance': 'compute',
'start_instance': 'compute',
'create_volume': 'volume'}
class MultiScheduler(driver.Scheduler):
"""A scheduler that holds multiple sub-schedulers.
    This exists to allow flag-driven composability of schedulers, allowing
third parties to integrate custom schedulers more easily.
"""
def __init__(self):
super(MultiScheduler, self).__init__()
compute_driver = utils.import_object(FLAGS.compute_scheduler_driver)
volume_driver = utils.import_object(FLAGS.volume_scheduler_driver)
self.drivers = {'compute': compute_driver,
'volume': volume_driver}
def __getattr__(self, key):
if not key.startswith('schedule_'):
raise AttributeError(key)
method = key[len('schedule_'):]
if method not in _METHOD_MAP:
raise AttributeError(key)
return getattr(self.drivers[_METHOD_MAP[method]], key)
def set_zone_manager(self, zone_manager):
for k, v in self.drivers.iteritems():
v.set_zone_manager(zone_manager)
def schedule(self, context, topic, *_args, **_kwargs):
return self.drivers[topic].schedule(context, topic, *_args, **_kwargs)
|
Allow different schedulers for compute and volume.
|
Allow different schedulers for compute and volume.
|
Python
|
apache-2.0
|
n0ano/ganttclient
|
Allow different schedulers for compute and volume.
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Openstack, LLC.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler that allows routing some calls to one driver and others to another.
"""
from nova import flags
from nova import utils
from nova.scheduler import driver
FLAGS = flags.FLAGS
flags.DEFINE_string('compute_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling compute calls')
flags.DEFINE_string('volume_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling volume calls')
# A mapping of methods to topics so we can figure out which driver to use.
_METHOD_MAP = {'run_instance': 'compute',
'start_instance': 'compute',
'create_volume': 'volume'}
class MultiScheduler(driver.Scheduler):
"""A scheduler that holds multiple sub-schedulers.
    This exists to allow flag-driven composability of schedulers, allowing
third parties to integrate custom schedulers more easily.
"""
def __init__(self):
super(MultiScheduler, self).__init__()
compute_driver = utils.import_object(FLAGS.compute_scheduler_driver)
volume_driver = utils.import_object(FLAGS.volume_scheduler_driver)
self.drivers = {'compute': compute_driver,
'volume': volume_driver}
def __getattr__(self, key):
if not key.startswith('schedule_'):
raise AttributeError(key)
method = key[len('schedule_'):]
if method not in _METHOD_MAP:
raise AttributeError(key)
return getattr(self.drivers[_METHOD_MAP[method]], key)
def set_zone_manager(self, zone_manager):
for k, v in self.drivers.iteritems():
v.set_zone_manager(zone_manager)
def schedule(self, context, topic, *_args, **_kwargs):
return self.drivers[topic].schedule(context, topic, *_args, **_kwargs)
|
<commit_before><commit_msg>Allow different schedulers for compute and volume.<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Openstack, LLC.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler that allows routing some calls to one driver and others to another.
"""
from nova import flags
from nova import utils
from nova.scheduler import driver
FLAGS = flags.FLAGS
flags.DEFINE_string('compute_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling compute calls')
flags.DEFINE_string('volume_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling volume calls')
# A mapping of methods to topics so we can figure out which driver to use.
_METHOD_MAP = {'run_instance': 'compute',
'start_instance': 'compute',
'create_volume': 'volume'}
class MultiScheduler(driver.Scheduler):
"""A scheduler that holds multiple sub-schedulers.
    This exists to allow flag-driven composability of schedulers, allowing
third parties to integrate custom schedulers more easily.
"""
def __init__(self):
super(MultiScheduler, self).__init__()
compute_driver = utils.import_object(FLAGS.compute_scheduler_driver)
volume_driver = utils.import_object(FLAGS.volume_scheduler_driver)
self.drivers = {'compute': compute_driver,
'volume': volume_driver}
def __getattr__(self, key):
if not key.startswith('schedule_'):
raise AttributeError(key)
method = key[len('schedule_'):]
if method not in _METHOD_MAP:
raise AttributeError(key)
return getattr(self.drivers[_METHOD_MAP[method]], key)
def set_zone_manager(self, zone_manager):
for k, v in self.drivers.iteritems():
v.set_zone_manager(zone_manager)
def schedule(self, context, topic, *_args, **_kwargs):
return self.drivers[topic].schedule(context, topic, *_args, **_kwargs)
|
Allow different schedulers for compute and volume.# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Openstack, LLC.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler that allows routing some calls to one driver and others to another.
"""
from nova import flags
from nova import utils
from nova.scheduler import driver
FLAGS = flags.FLAGS
flags.DEFINE_string('compute_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling compute calls')
flags.DEFINE_string('volume_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling volume calls')
# A mapping of methods to topics so we can figure out which driver to use.
_METHOD_MAP = {'run_instance': 'compute',
'start_instance': 'compute',
'create_volume': 'volume'}
class MultiScheduler(driver.Scheduler):
"""A scheduler that holds multiple sub-schedulers.
    This exists to allow flag-driven composability of schedulers, allowing
third parties to integrate custom schedulers more easily.
"""
def __init__(self):
super(MultiScheduler, self).__init__()
compute_driver = utils.import_object(FLAGS.compute_scheduler_driver)
volume_driver = utils.import_object(FLAGS.volume_scheduler_driver)
self.drivers = {'compute': compute_driver,
'volume': volume_driver}
def __getattr__(self, key):
if not key.startswith('schedule_'):
raise AttributeError(key)
method = key[len('schedule_'):]
if method not in _METHOD_MAP:
raise AttributeError(key)
return getattr(self.drivers[_METHOD_MAP[method]], key)
def set_zone_manager(self, zone_manager):
for k, v in self.drivers.iteritems():
v.set_zone_manager(zone_manager)
def schedule(self, context, topic, *_args, **_kwargs):
return self.drivers[topic].schedule(context, topic, *_args, **_kwargs)
|
<commit_before><commit_msg>Allow different schedulers for compute and volume.<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Openstack, LLC.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler that allows routing some calls to one driver and others to another.
"""
from nova import flags
from nova import utils
from nova.scheduler import driver
FLAGS = flags.FLAGS
flags.DEFINE_string('compute_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling compute calls')
flags.DEFINE_string('volume_scheduler_driver',
'nova.scheduler.chance.ChanceScheduler',
'Driver to use for scheduling volume calls')
# A mapping of methods to topics so we can figure out which driver to use.
_METHOD_MAP = {'run_instance': 'compute',
'start_instance': 'compute',
'create_volume': 'volume'}
class MultiScheduler(driver.Scheduler):
"""A scheduler that holds multiple sub-schedulers.
    This exists to allow flag-driven composability of schedulers, allowing
third parties to integrate custom schedulers more easily.
"""
def __init__(self):
super(MultiScheduler, self).__init__()
compute_driver = utils.import_object(FLAGS.compute_scheduler_driver)
volume_driver = utils.import_object(FLAGS.volume_scheduler_driver)
self.drivers = {'compute': compute_driver,
'volume': volume_driver}
def __getattr__(self, key):
if not key.startswith('schedule_'):
raise AttributeError(key)
method = key[len('schedule_'):]
if method not in _METHOD_MAP:
raise AttributeError(key)
return getattr(self.drivers[_METHOD_MAP[method]], key)
def set_zone_manager(self, zone_manager):
for k, v in self.drivers.iteritems():
v.set_zone_manager(zone_manager)
def schedule(self, context, topic, *_args, **_kwargs):
return self.drivers[topic].schedule(context, topic, *_args, **_kwargs)
|
|
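The __getattr__ forwarding above is the heart of MultiScheduler: any schedule_<method> lookup is routed to whichever sub-driver owns that method's topic. A minimal, dependency-free sketch of the same pattern — the driver classes here are placeholders, not Nova code:

_METHOD_MAP = {'run_instance': 'compute', 'create_volume': 'volume'}

class ComputeDriver(object):
    def schedule_run_instance(self):
        return 'compute driver chose a host'

class VolumeDriver(object):
    def schedule_create_volume(self):
        return 'volume driver chose a host'

class Multi(object):
    def __init__(self):
        self.drivers = {'compute': ComputeDriver(), 'volume': VolumeDriver()}

    def __getattr__(self, key):
        # Only fires when normal attribute lookup fails, so self.drivers is safe here.
        if not key.startswith('schedule_'):
            raise AttributeError(key)
        method = key[len('schedule_'):]
        if method not in _METHOD_MAP:
            raise AttributeError(key)
        return getattr(self.drivers[_METHOD_MAP[method]], key)

m = Multi()
print(m.schedule_run_instance())   # handled by ComputeDriver
print(m.schedule_create_volume())  # handled by VolumeDriver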
a32477899c7d21a82382f4c274cac129074b3e32
|
pombola/kenya/management/commands/kenya_import_2017_photos.py
|
pombola/kenya/management/commands/kenya_import_2017_photos.py
|
from __future__ import print_function
import csv
from os.path import abspath, dirname, exists, join
import shutil
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand
from images.models import Image
from pombola.core.models import Person
from pombola.core.utils import mkdir_p
from PIL import Image as PillowImage
import requests
results_directory = abspath(join(
dirname(__file__), '..', '..', 'election_data_2017', 'results'
))
class Command(BaseCommand):
help = 'Import photos for elected representatives from the 2017 election'
def handle_person(self, row):
person = Person.objects.get(
identifiers__scheme='ynr-ke', identifiers__identifier=row['id'])
image_url = row['image_url']
if not image_url:
return
# If we haven't already downloaded this file, download it:
image_filename = join(self.cache_directory, str(row['id']))
if not exists(image_filename):
r = requests.get(image_url, stream=True)
r.raise_for_status()
with open(image_filename, 'wb') as f:
shutil.copyfileobj(r.raw, f)
# Check this is a file type we can cope with:
try:
pillow_image = PillowImage.open(image_filename)
except IOError as e:
if 'cannot identify image file' in e.args[0]:
print("Ignoring a non-image file {0}".format(image_filename))
return None
raise
if pillow_image.format not in ('PNG', 'JPEG'):
raise Exception("Found an unsupported image format: {0}".format(pillow_image.format))
extension = {'PNG': 'png', 'JPEG': 'jpg'}[pillow_image.format]
storage = FileSystemStorage()
desired_storage_path = join(
'images', 'kenya-ynr', '{}.{}'.format(row['id'], extension))
with open(image_filename, 'rb') as f:
storage_filename = storage.save(desired_storage_path, f)
Image.objects.create(
content_object=person,
source='http://kenya.ynr.mysociety.org/person/{0}'.format(row['id']),
is_primary=(not person.images.exists()),
image=storage_filename,
)
print("Created image for:", person)
def handle(self, **options):
self.cache_directory = join(results_directory, '.downloaded-images')
mkdir_p(self.cache_directory)
for filename in ('na.csv', 'senate.csv', 'wo.csv'):
full_filename = join(results_directory, filename)
with open(full_filename) as f:
reader = csv.DictReader(f)
for row in reader:
self.handle_person(row)
|
Add a script to import elected candidates' photos from YNR
|
Add a script to import elected candidates' photos from YNR
This script uses the YNR IDs that the kenya_import_2017_election_results
command added to import the photos from YNR and add them to the
appropriate person in Mzalendo. If there are existing photos of the
person, the YNR photo is not set as primary; otherwise it is.
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola
|
Add a script to import elected candidates' photos from YNR
This script uses the YNR IDs that the kenya_import_2017_election_results
command added to import the photos from YNR and add them to the
appropriate person in Mzalendo. If there are existing photos of the
person, the YNR photo is not set as primary; otherwise it is.
|
from __future__ import print_function
import csv
from os.path import abspath, dirname, exists, join
import shutil
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand
from images.models import Image
from pombola.core.models import Person
from pombola.core.utils import mkdir_p
from PIL import Image as PillowImage
import requests
results_directory = abspath(join(
dirname(__file__), '..', '..', 'election_data_2017', 'results'
))
class Command(BaseCommand):
help = 'Import photos for elected representatives from the 2017 election'
def handle_person(self, row):
person = Person.objects.get(
identifiers__scheme='ynr-ke', identifiers__identifier=row['id'])
image_url = row['image_url']
if not image_url:
return
# If we haven't already downloaded this file, download it:
image_filename = join(self.cache_directory, str(row['id']))
if not exists(image_filename):
r = requests.get(image_url, stream=True)
r.raise_for_status()
with open(image_filename, 'wb') as f:
shutil.copyfileobj(r.raw, f)
# Check this is a file type we can cope with:
try:
pillow_image = PillowImage.open(image_filename)
except IOError as e:
if 'cannot identify image file' in e.args[0]:
print("Ignoring a non-image file {0}".format(image_filename))
return None
raise
if pillow_image.format not in ('PNG', 'JPEG'):
raise Exception("Found an unsupported image format: {0}".format(pillow_image.format))
extension = {'PNG': 'png', 'JPEG': 'jpg'}[pillow_image.format]
storage = FileSystemStorage()
desired_storage_path = join(
'images', 'kenya-ynr', '{}.{}'.format(row['id'], extension))
with open(image_filename, 'rb') as f:
storage_filename = storage.save(desired_storage_path, f)
Image.objects.create(
content_object=person,
source='http://kenya.ynr.mysociety.org/person/{0}'.format(row['id']),
is_primary=(not person.images.exists()),
image=storage_filename,
)
print("Created image for:", person)
def handle(self, **options):
self.cache_directory = join(results_directory, '.downloaded-images')
mkdir_p(self.cache_directory)
for filename in ('na.csv', 'senate.csv', 'wo.csv'):
full_filename = join(results_directory, filename)
with open(full_filename) as f:
reader = csv.DictReader(f)
for row in reader:
self.handle_person(row)
|
<commit_before><commit_msg>Add a script to import elected candidates' photos from YNR
This script uses the YNR IDs that the kenya_import_2017_election_results
command added to import the photos from YNR and add them to the
appropriate person in Mzalendo. If there are existing photos of the
person, the YNR photo is not set as primary; otherwise it is.<commit_after>
|
from __future__ import print_function
import csv
from os.path import abspath, dirname, exists, join
import shutil
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand
from images.models import Image
from pombola.core.models import Person
from pombola.core.utils import mkdir_p
from PIL import Image as PillowImage
import requests
results_directory = abspath(join(
dirname(__file__), '..', '..', 'election_data_2017', 'results'
))
class Command(BaseCommand):
help = 'Import photos for elected representatives from the 2017 election'
def handle_person(self, row):
person = Person.objects.get(
identifiers__scheme='ynr-ke', identifiers__identifier=row['id'])
image_url = row['image_url']
if not image_url:
return
# If we haven't already downloaded this file, download it:
image_filename = join(self.cache_directory, str(row['id']))
if not exists(image_filename):
r = requests.get(image_url, stream=True)
r.raise_for_status()
with open(image_filename, 'wb') as f:
shutil.copyfileobj(r.raw, f)
# Check this is a file type we can cope with:
try:
pillow_image = PillowImage.open(image_filename)
except IOError as e:
if 'cannot identify image file' in e.args[0]:
print("Ignoring a non-image file {0}".format(image_filename))
return None
raise
if pillow_image.format not in ('PNG', 'JPEG'):
raise Exception("Found an unsupported image format: {0}".format(pillow_image.format))
extension = {'PNG': 'png', 'JPEG': 'jpg'}[pillow_image.format]
storage = FileSystemStorage()
desired_storage_path = join(
'images', 'kenya-ynr', '{}.{}'.format(row['id'], extension))
with open(image_filename, 'rb') as f:
storage_filename = storage.save(desired_storage_path, f)
Image.objects.create(
content_object=person,
source='http://kenya.ynr.mysociety.org/person/{0}'.format(row['id']),
is_primary=(not person.images.exists()),
image=storage_filename,
)
print("Created image for:", person)
def handle(self, **options):
self.cache_directory = join(results_directory, '.downloaded-images')
mkdir_p(self.cache_directory)
for filename in ('na.csv', 'senate.csv', 'wo.csv'):
full_filename = join(results_directory, filename)
with open(full_filename) as f:
reader = csv.DictReader(f)
for row in reader:
self.handle_person(row)
|
Add a script to import elected candidates' photos from YNR
This script uses the YNR IDs that the kenya_import_2017_election_results
command added to import the photos from YNR and add them to the
appropriate person in Mzalendo. If there are existing photos of the
person, the YNR photo is not set as primary; otherwise it is.from __future__ import print_function
import csv
from os.path import abspath, dirname, exists, join
import shutil
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand
from images.models import Image
from pombola.core.models import Person
from pombola.core.utils import mkdir_p
from PIL import Image as PillowImage
import requests
results_directory = abspath(join(
dirname(__file__), '..', '..', 'election_data_2017', 'results'
))
class Command(BaseCommand):
help = 'Import photos for elected representatives from the 2017 election'
def handle_person(self, row):
person = Person.objects.get(
identifiers__scheme='ynr-ke', identifiers__identifier=row['id'])
image_url = row['image_url']
if not image_url:
return
# If we haven't already downloaded this file, download it:
image_filename = join(self.cache_directory, str(row['id']))
if not exists(image_filename):
r = requests.get(image_url, stream=True)
r.raise_for_status()
with open(image_filename, 'wb') as f:
shutil.copyfileobj(r.raw, f)
# Check this is a file type we can cope with:
try:
pillow_image = PillowImage.open(image_filename)
except IOError as e:
if 'cannot identify image file' in e.args[0]:
print("Ignoring a non-image file {0}".format(image_filename))
return None
raise
if pillow_image.format not in ('PNG', 'JPEG'):
raise Exception("Found an unsupported image format: {0}".format(pillow_image.format))
extension = {'PNG': 'png', 'JPEG': 'jpg'}[pillow_image.format]
storage = FileSystemStorage()
desired_storage_path = join(
'images', 'kenya-ynr', '{}.{}'.format(row['id'], extension))
with open(image_filename, 'rb') as f:
storage_filename = storage.save(desired_storage_path, f)
Image.objects.create(
content_object=person,
source='http://kenya.ynr.mysociety.org/person/{0}'.format(row['id']),
is_primary=(not person.images.exists()),
image=storage_filename,
)
print("Created image for:", person)
def handle(self, **options):
self.cache_directory = join(results_directory, '.downloaded-images')
mkdir_p(self.cache_directory)
for filename in ('na.csv', 'senate.csv', 'wo.csv'):
full_filename = join(results_directory, filename)
with open(full_filename) as f:
reader = csv.DictReader(f)
for row in reader:
self.handle_person(row)
|
<commit_before><commit_msg>Add a script to import elected candidates' photos from YNR
This script uses the YNR IDs that the kenya_import_2017_election_results
command added to import the photos from YNR and add them to the
appropriate person in Mzalendo. If there are existing photos of the
person, the YNR photo is not set as primary; otherwise it is.<commit_after>from __future__ import print_function
import csv
from os.path import abspath, dirname, exists, join
import shutil
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand
from images.models import Image
from pombola.core.models import Person
from pombola.core.utils import mkdir_p
from PIL import Image as PillowImage
import requests
results_directory = abspath(join(
dirname(__file__), '..', '..', 'election_data_2017', 'results'
))
class Command(BaseCommand):
help = 'Import photos for elected representatives from the 2017 election'
def handle_person(self, row):
person = Person.objects.get(
identifiers__scheme='ynr-ke', identifiers__identifier=row['id'])
image_url = row['image_url']
if not image_url:
return
# If we haven't already downloaded this file, download it:
image_filename = join(self.cache_directory, str(row['id']))
if not exists(image_filename):
r = requests.get(image_url, stream=True)
r.raise_for_status()
with open(image_filename, 'wb') as f:
shutil.copyfileobj(r.raw, f)
# Check this is a file type we can cope with:
try:
pillow_image = PillowImage.open(image_filename)
except IOError as e:
if 'cannot identify image file' in e.args[0]:
print("Ignoring a non-image file {0}".format(image_filename))
return None
raise
if pillow_image.format not in ('PNG', 'JPEG'):
raise Exception("Found an unsupported image format: {0}".format(pillow_image.format))
extension = {'PNG': 'png', 'JPEG': 'jpg'}[pillow_image.format]
storage = FileSystemStorage()
desired_storage_path = join(
'images', 'kenya-ynr', '{}.{}'.format(row['id'], extension))
with open(image_filename, 'rb') as f:
storage_filename = storage.save(desired_storage_path, f)
Image.objects.create(
content_object=person,
source='http://kenya.ynr.mysociety.org/person/{0}'.format(row['id']),
is_primary=(not person.images.exists()),
image=storage_filename,
)
print("Created image for:", person)
def handle(self, **options):
self.cache_directory = join(results_directory, '.downloaded-images')
mkdir_p(self.cache_directory)
for filename in ('na.csv', 'senate.csv', 'wo.csv'):
full_filename = join(results_directory, filename)
with open(full_filename) as f:
reader = csv.DictReader(f)
for row in reader:
self.handle_person(row)
|
|
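The download-and-verify core of the command reduces to a few lines. A sketch assuming only requests and Pillow are installed; the url and filename arguments are illustrative, and older Pillow raises IOError where newer releases raise UnidentifiedImageError:

import shutil

import requests
from PIL import Image

def fetch_and_check(url, filename):
    # Stream the body to disk, then let Pillow sniff the real format.
    r = requests.get(url, stream=True)
    r.raise_for_status()
    with open(filename, 'wb') as f:
        shutil.copyfileobj(r.raw, f)
    image = Image.open(filename)  # raises on non-image data
    if image.format not in ('PNG', 'JPEG'):
        raise ValueError('unsupported image format: {0}'.format(image.format))
    return {'PNG': 'png', 'JPEG': 'jpg'}[image.format]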
0aef588b92adc6ccc175a6b6d34784ff4d7e290d
|
coprocess/bindings/python/sample_server.py
|
coprocess/bindings/python/sample_server.py
|
import coprocess_object_pb2
import grpc, time
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
|
import coprocess_object_pb2
import grpc, time, json
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def DispatchEvent(self, event_wrapper, context):
event = json.loads(event_wrapper.payload)
print("DispatchEvent:", event)
return coprocess_object_pb2.EventReply()
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
|
Adjust gRPC/Python sample to handle events.
|
Adjust gRPC/Python sample to handle events.
|
Python
|
mpl-2.0
|
mvdan/tyk,nebolsin/tyk,mvdan/tyk,nebolsin/tyk,nebolsin/tyk,mvdan/tyk,nebolsin/tyk,nebolsin/tyk,mvdan/tyk,lonelycode/tyk,lonelycode/tyk,nebolsin/tyk,nebolsin/tyk,lonelycode/tyk,mvdan/tyk,mvdan/tyk,nebolsin/tyk,mvdan/tyk,mvdan/tyk
|
import coprocess_object_pb2
import grpc, time
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
Adjust gRPC/Python sample to handle events.
|
import coprocess_object_pb2
import grpc, time, json
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def DispatchEvent(self, event_wrapper, context):
event = json.loads(event_wrapper.payload)
print("DispatchEvent:", event)
return coprocess_object_pb2.EventReply()
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
|
<commit_before>import coprocess_object_pb2
import grpc, time
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
<commit_msg>Adjust gRPC/Python sample to handle events.<commit_after>
|
import coprocess_object_pb2
import grpc, time, json
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def DispatchEvent(self, event_wrapper, context):
event = json.loads(event_wrapper.payload)
print("DispatchEvent:", event)
return coprocess_object_pb2.EventReply()
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
|
import coprocess_object_pb2
import grpc, time
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
Adjust gRPC/Python sample to handle events.import coprocess_object_pb2
import grpc, time, json
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def DispatchEvent(self, event_wrapper, context):
event = json.loads(event_wrapper.payload)
print("DispatchEvent:", event)
return coprocess_object_pb2.EventReply()
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
|
<commit_before>import coprocess_object_pb2
import grpc, time
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
<commit_msg>Adjust gRPC/Python sample to handle events.<commit_after>import coprocess_object_pb2
import grpc, time, json
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
from concurrent import futures
def MyPreMiddleware(coprocess_object):
coprocess_object.request.set_headers["myheader"] = "myvalue"
return coprocess_object
def MyPostMiddleware(coprocess_object):
coprocess_object.request.set_headers["anotherheader"] = "anothervalue"
return coprocess_object
class MyDispatcher(coprocess_object_pb2.DispatcherServicer):
def Dispatch(self, coprocess_object, context):
if coprocess_object.hook_name == "MyPreMiddleware":
coprocess_object = MyPreMiddleware(coprocess_object)
if coprocess_object.hook_name == "MyPostMiddleware":
coprocess_object = MyPostMiddleware(coprocess_object)
return coprocess_object
def DispatchEvent(self, event_wrapper, context):
event = json.loads(event_wrapper.payload)
print("DispatchEvent:", event)
return coprocess_object_pb2.EventReply()
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
coprocess_object_pb2.add_DispatcherServicer_to_server(
MyDispatcher(), server)
server.add_insecure_port('[::]:5555')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
|
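A side note on the serve() loop: on newer grpcio releases (1.22 and later) the day-long sleep loop can be replaced by the built-in blocking call, with behaviour otherwise unchanged. This sketch assumes the same generated coprocess_object_pb2 module the sample imports:

def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    coprocess_object_pb2.add_DispatcherServicer_to_server(MyDispatcher(), server)
    server.add_insecure_port('[::]:5555')
    server.start()
    try:
        server.wait_for_termination()  # blocks until stop() or Ctrl-C
    except KeyboardInterrupt:
        server.stop(0)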
3e45602583a7760a5fb7b5beb47405b4dddd0f63
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
    if tests.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
Exit with code 1 if tests fail.
|
Exit with code 1 if tests fail.
Fixes #621 and Travis.
|
Python
|
mit
|
josephbisch/the-blue-alliance,1fish2/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)Exit with code 1 if tests fail.
Fixes #621 and Travis.
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
    if tests.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
<commit_before>#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)<commit_msg>Exit with code 1 if tests fail.
Fixes #621 and Travis.<commit_after>
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
    if tests.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)Exit with code 1 if tests fail.
Fixes #621 and Travis.#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
    if tests.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
<commit_before>#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)<commit_msg>Exit with code 1 if tests fail.
Fixes #621 and Travis.<commit_after>#!/usr/bin/python
import optparse
import sys
# Install the Python unittest2 package before you run this script.
import unittest2
USAGE = """%prog SDK_PATH
Run unit tests for App Engine apps.
The SDK Path is probably /usr/local/google_appengine on Mac OS
SDK_PATH Path to the SDK installation"""
def main(sdk_path, test_pattern):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
suite = unittest2.loader.TestLoader().discover("tests", test_pattern)
tests = unittest2.TextTestRunner(verbosity=2).run(suite)
    if tests.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 1:
print 'Warning: Trying default SDK path.'
sdk_path = "/usr/local/google_appengine"
else:
sdk_path = args[0]
test_pattern = "test*.py"
if len(args) > 1:
test_pattern = args[1]
main(sdk_path, test_pattern)
|
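For comparison, the same fail-the-build idiom with the standard library's unittest module, whose discover() and wasSuccessful() behave like their unittest2 counterparts:

import sys
import unittest

suite = unittest.defaultTestLoader.discover('tests', pattern='test*.py')
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(0 if result.wasSuccessful() else 1)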
c881b3176b122cdefe406d02e640600e6d4f4727
|
migrations/versions/ec3035d61f8b_.py
|
migrations/versions/ec3035d61f8b_.py
|
"""Add status column to tickets.
Revision ID: ec3035d61f8b
Revises: 0397e48c5db8
Create Date: 2020-08-30 21:04:30.936267
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ec3035d61f8b'
down_revision = '0397e48c5db8'
branch_labels = None
depends_on = None
def upgrade():
try:
op.add_column('serials', sa.Column('status', sa.String(length=10), nullable=True))
except Exception:
pass
def downgrade():
with op.batch_alter_table('serials') as batch:
batch.drop_column('status')
|
Add ticket's status column migration.
|
Add ticket's status column migration.
|
Python
|
mpl-2.0
|
mrf345/FQM,mrf345/FQM,mrf345/FQM,mrf345/FQM
|
Add ticket's status column migration.
|
"""Add status column to tickets.
Revision ID: ec3035d61f8b
Revises: 0397e48c5db8
Create Date: 2020-08-30 21:04:30.936267
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ec3035d61f8b'
down_revision = '0397e48c5db8'
branch_labels = None
depends_on = None
def upgrade():
try:
op.add_column('serials', sa.Column('status', sa.String(length=10), nullable=True))
except Exception:
pass
def downgrade():
with op.batch_alter_table('serials') as batch:
batch.drop_column('status')
|
<commit_before><commit_msg>Add ticket's status column migration.<commit_after>
|
"""Add status column to tickets.
Revision ID: ec3035d61f8b
Revises: 0397e48c5db8
Create Date: 2020-08-30 21:04:30.936267
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ec3035d61f8b'
down_revision = '0397e48c5db8'
branch_labels = None
depends_on = None
def upgrade():
try:
op.add_column('serials', sa.Column('status', sa.String(length=10), nullable=True))
except Exception:
pass
def downgrade():
with op.batch_alter_table('serials') as batch:
batch.drop_column('status')
|
Add ticket's status column migration."""Add status column to tickets.
Revision ID: ec3035d61f8b
Revises: 0397e48c5db8
Create Date: 2020-08-30 21:04:30.936267
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ec3035d61f8b'
down_revision = '0397e48c5db8'
branch_labels = None
depends_on = None
def upgrade():
try:
op.add_column('serials', sa.Column('status', sa.String(length=10), nullable=True))
except Exception:
pass
def downgrade():
with op.batch_alter_table('serials') as batch:
batch.drop_column('status')
|
<commit_before><commit_msg>Add ticket's status column migration.<commit_after>"""Add status column to tickets.
Revision ID: ec3035d61f8b
Revises: 0397e48c5db8
Create Date: 2020-08-30 21:04:30.936267
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ec3035d61f8b'
down_revision = '0397e48c5db8'
branch_labels = None
depends_on = None
def upgrade():
try:
op.add_column('serials', sa.Column('status', sa.String(length=10), nullable=True))
except Exception:
pass
def downgrade():
with op.batch_alter_table('serials') as batch:
batch.drop_column('status')
|
|
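An aside on the bare try/except in upgrade(): the "column may already exist" guard can be made explicit with SQLAlchemy's inspector instead of swallowing every exception. A sketch, assuming a SQLAlchemy recent enough for sa.inspect() to accept the migration connection:

import sqlalchemy as sa
from alembic import op

def upgrade():
    inspector = sa.inspect(op.get_bind())
    existing = [col['name'] for col in inspector.get_columns('serials')]
    if 'status' not in existing:
        op.add_column('serials',
                      sa.Column('status', sa.String(length=10), nullable=True))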
9d501aaa9d443e450224388854f053683ca2baae
|
permute/tests/test_npc.py
|
permute/tests/test_npc.py
|
from __future__ import division, print_function, absolute_import
from nose.plugins.attrib import attr
from nose.tools import assert_raises, raises
import numpy as np
from scipy.stats import norm
from ..npc import (fisher,
liptak,
tippett)
def test_fisher():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(fisher(pvalues), 11.11546, 5)
np.testing.assert_equal(fisher(1), -0.0)
np.testing.assert_array_less(fisher(10), 0)
def test_liptak():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(liptak(pvalues), 0.5728894, 5)
np.testing.assert_equal(liptak(1), norm.ppf(0))
np.testing.assert_equal(liptak(10), np.nan)
def test_tippett():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(tippett(pvalues), 0.95, 5)
np.testing.assert_equal(tippett(1), 0)
np.testing.assert_equal(tippett(10), -9)
|
Add tests for combining functions
|
TST: Add tests for combining functions
|
Python
|
bsd-2-clause
|
qqqube/permute,statlab/permute,kellieotto/permute,jarrodmillman/permute,kellieotto/permute
|
TST: Add tests for combining functions
|
from __future__ import division, print_function, absolute_import
from nose.plugins.attrib import attr
from nose.tools import assert_raises, raises
import numpy as np
from scipy.stats import norm
from ..npc import (fisher,
liptak,
tippett)
def test_fisher():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(fisher(pvalues), 11.11546, 5)
np.testing.assert_equal(fisher(1), -0.0)
np.testing.assert_array_less(fisher(10), 0)
def test_liptak():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(liptak(pvalues), 0.5728894, 5)
np.testing.assert_equal(liptak(1), norm.ppf(0))
np.testing.assert_equal(liptak(10), np.nan)
def test_tippett():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(tippett(pvalues), 0.95, 5)
np.testing.assert_equal(tippett(1), 0)
np.testing.assert_equal(tippett(10), -9)
|
<commit_before><commit_msg>TST: Add tests for combining functions<commit_after>
|
from __future__ import division, print_function, absolute_import
from nose.plugins.attrib import attr
from nose.tools import assert_raises, raises
import numpy as np
from scipy.stats import norm
from ..npc import (fisher,
liptak,
tippett)
def test_fisher():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(fisher(pvalues), 11.11546, 5)
np.testing.assert_equal(fisher(1), -0.0)
np.testing.assert_array_less(fisher(10), 0)
def test_liptak():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(liptak(pvalues), 0.5728894, 5)
np.testing.assert_equal(liptak(1), norm.ppf(0))
np.testing.assert_equal(liptak(10), np.nan)
def test_tippett():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(tippett(pvalues), 0.95, 5)
np.testing.assert_equal(tippett(1), 0)
np.testing.assert_equal(tippett(10), -9)
|
TST: Add tests for combining functionsfrom __future__ import division, print_function, absolute_import
from nose.plugins.attrib import attr
from nose.tools import assert_raises, raises
import numpy as np
from scipy.stats import norm
from ..npc import (fisher,
liptak,
tippett)
def test_fisher():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(fisher(pvalues), 11.11546, 5)
np.testing.assert_equal(fisher(1), -0.0)
np.testing.assert_array_less(fisher(10), 0)
def test_liptak():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(liptak(pvalues), 0.5728894, 5)
np.testing.assert_equal(liptak(1), norm.ppf(0))
np.testing.assert_equal(liptak(10), np.nan)
def test_tippett():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(tippett(pvalues), 0.95, 5)
np.testing.assert_equal(tippett(1), 0)
np.testing.assert_equal(tippett(10), -9)
|
<commit_before><commit_msg>TST: Add tests for combining functions<commit_after>from __future__ import division, print_function, absolute_import
from nose.plugins.attrib import attr
from nose.tools import assert_raises, raises
import numpy as np
from scipy.stats import norm
from ..npc import (fisher,
liptak,
tippett)
def test_fisher():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(fisher(pvalues), 11.11546, 5)
np.testing.assert_equal(fisher(1), -0.0)
np.testing.assert_array_less(fisher(10), 0)
def test_liptak():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(liptak(pvalues), 0.5728894, 5)
np.testing.assert_equal(liptak(1), norm.ppf(0))
np.testing.assert_equal(liptak(10), np.nan)
def test_tippett():
pvalues = np.linspace(0.05, 0.9, num=5)
np.testing.assert_almost_equal(tippett(pvalues), 0.95, 5)
np.testing.assert_equal(tippett(1), 0)
np.testing.assert_equal(tippett(10), -9)
|
|
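The expected values in these tests pin the combining functions down fairly tightly. A minimal sketch consistent with them, assuming the usual definitions (the real ..npc implementations are not shown here, so this is an assumption):

import numpy as np
from scipy.stats import norm

def fisher(pvalues):
    # Fisher's combining function: -2 * sum(log(p_i))
    return -2 * np.log(np.asarray(pvalues)).sum()

def liptak(pvalues):
    # Liptak/Stouffer: sum of standard-normal quantiles of (1 - p_i)
    return norm.ppf(1 - np.asarray(pvalues)).sum()

def tippett(pvalues):
    # Tippett: the largest (1 - p_i)
    return np.max(1 - np.asarray(pvalues))

With pvalues = np.linspace(0.05, 0.9, num=5) these reproduce 11.11546, 0.5728894 and 0.95, and the degenerate inputs 1 and 10 give -0.0, norm.ppf(0)/np.nan and 0/-9 respectively, matching the assertions.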
827802f111fbe5c2f9a50b74a46afdef5eae2a2d
|
salt/pillar/nacl.py
|
salt/pillar/nacl.py
|
# -*- coding: utf-8 -*-
'''
Decrypt pillar data through the builtin NACL renderer
In most cases, you'll want to make this the last external pillar used. For
example, to pair with the builtin stack pillar you could do something like
this:
.. code:: yaml
nacl.config:
keyfile: /root/.nacl
ext_pillar:
- stack: /path/to/stack.cfg
- nacl: {}
Set ``nacl.config`` in your config.
'''
from __future__ import absolute_import
import salt
def ext_pillar(minion_id, pillar, *args, **kwargs):
render_function = salt.loader.render(__opts__, __salt__).get("nacl")
return render_function(pillar)
|
Decrypt NACL passwords on ext_pillar
|
Decrypt NACL passwords on ext_pillar
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Decrypt NACL passwords on ext_pillar
|
# -*- coding: utf-8 -*-
'''
Decrypt pillar data through the builtin NACL renderer
In most cases, you'll want to make this the last external pillar used. For
example, to pair with the builtin stack pillar you could do something like
this:
.. code:: yaml
nacl.config:
keyfile: /root/.nacl
ext_pillar:
- stack: /path/to/stack.cfg
- nacl: {}
Set ``nacl.config`` in your config.
'''
from __future__ import absolute_import
import salt
def ext_pillar(minion_id, pillar, *args, **kwargs):
render_function = salt.loader.render(__opts__, __salt__).get("nacl")
return render_function(pillar)
|
<commit_before><commit_msg>Decrypt NACL passwords on ext_pillar<commit_after>
|
# -*- coding: utf-8 -*-
'''
Decrypt pillar data through the builtin NACL renderer
In most cases, you'll want to make this the last external pillar used. For
example, to pair with the builtin stack pillar you could do something like
this:
.. code:: yaml
nacl.config:
keyfile: /root/.nacl
ext_pillar:
- stack: /path/to/stack.cfg
- nacl: {}
Set ``nacl.config`` in your config.
'''
from __future__ import absolute_import
import salt
def ext_pillar(minion_id, pillar, *args, **kwargs):
render_function = salt.loader.render(__opts__, __salt__).get("nacl")
return render_function(pillar)
|
Decrypt NACL passwords on ext_pillar# -*- coding: utf-8 -*-
'''
Decrypt pillar data through the builtin NACL renderer
In most cases, you'll want to make this the last external pillar used. For
example, to pair with the builtin stack pillar you could do something like
this:
.. code:: yaml
nacl.config:
keyfile: /root/.nacl
ext_pillar:
- stack: /path/to/stack.cfg
- nacl: {}
Set ``nacl.config`` in your config.
'''
from __future__ import absolute_import
import salt
def ext_pillar(minion_id, pillar, *args, **kwargs):
render_function = salt.loader.render(__opts__, __salt__).get("nacl")
return render_function(pillar)
|
<commit_before><commit_msg>Decrypt NACL passwords on ext_pillar<commit_after># -*- coding: utf-8 -*-
'''
Decrypt pillar data through the builtin NACL renderer
In most cases, you'll want to make this the last external pillar used. For
example, to pair with the builtin stack pillar you could do something like
this:
.. code:: yaml
nacl.config:
keyfile: /root/.nacl
ext_pillar:
- stack: /path/to/stack.cfg
- nacl: {}
Set ``nacl.config`` in your config.
'''
from __future__ import absolute_import
import salt
def ext_pillar(minion_id, pillar, *args, **kwargs):
render_function = salt.loader.render(__opts__, __salt__).get("nacl")
return render_function(pillar)
|
|
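Conceptually, the render function returned by the loader is applied to the whole pillar structure, so ciphertext leaves come back decrypted while plain values pass through. A hedged illustration (the ciphertext placeholder is fabricated, __opts__ and __salt__ exist only inside a loaded Salt module, and the actual decryption lives in Salt's nacl renderer):

pillar = {'db': {'password': 'NACL_CIPHERTEXT_PLACEHOLDER'}}  # placeholder value
render_function = salt.loader.render(__opts__, __salt__).get('nacl')
decrypted = render_function(pillar)  # encrypted leaves returned as plaintext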
ce8dc2152a508ea359159a917ea469a106065503
|
modules/neural_network.py
|
modules/neural_network.py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 27 14:10:49 2017
@author: daniele
"""
import tensorflow as tf
import os
import numpy as np
import matplotlib.pyplot as plt
|
Add (currently empty) neural networks module
|
feat: Add (currently empty) neural networks module
|
Python
|
mit
|
dangall/Kaggle-MobileODT-Cancer-Screening
|
feat: Add (currently empty) neural networks module
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 27 14:10:49 2017
@author: daniele
"""
import tensorflow as tf
import os
import numpy as np
import matplotlib.pyplot as plt
|
<commit_before><commit_msg>feat: Add (currently empty) neural networks module<commit_after>
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 27 14:10:49 2017
@author: daniele
"""
import tensorflow as tf
import os
import numpy as np
import matplotlib.pyplot as plt
|
feat: Add (currently empty) neural networks module#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 27 14:10:49 2017
@author: daniele
"""
import tensorflow as tf
import os
import numpy as np
import matplotlib.pyplot as plt
|
<commit_before><commit_msg>feat: Add (currently empty) neural networks module<commit_after>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 27 14:10:49 2017
@author: daniele
"""
import tensorflow as tf
import os
import numpy as np
import matplotlib.pyplot as plt
|
|
f8b6d9a1bb12087c25e60df03f9f40c435f1a949
|
tests/test_archive.py
|
tests/test_archive.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ['', 'Expenditure', 'Income'])
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ('a', 'Expenditure', 'Income'))
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
|
Update test for recent version of agate
|
Update test for recent version of agate
|
Python
|
mit
|
wireservice/agate-remote,onyxfish/agate-remote
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ['', 'Expenditure', 'Income'])
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
Update test for recent version of agate
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ('a', 'Expenditure', 'Income'))
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ['', 'Expenditure', 'Income'])
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
<commit_msg>Update test for recent version of agate<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ('a', 'Expenditure', 'Income'))
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ['', 'Expenditure', 'Income'])
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
Update test for recent version of agate#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ('a', 'Expenditure', 'Income'))
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ['', 'Expenditure', 'Income'])
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
<commit_msg>Update test for recent version of agate<commit_after>#!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
import agate
import agateremote
class TestArchive(agate.AgateTestCase):
def setUp(self):
self.archive = agateremote.Archive('https://github.com/vincentarelbundock/Rdatasets/raw/master/csv/')
def test_get_table(self):
table = self.archive.get_table('sandwich/PublicSchools.csv')
self.assertColumnNames(table, ('a', 'Expenditure', 'Income'))
self.assertColumnTypes(table, [agate.Text, agate.Number, agate.Number])
self.assertEqual(len(table.rows), 51)
|
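For intuition, Archive.get_table amounts to fetching base URL plus path and handing the CSV to agate; a rough sketch under that assumption (the real agateremote implementation may differ):

import io
import requests
import agate

class ArchiveSketch(object):
    def __init__(self, base_url):
        self.base_url = base_url

    def get_table(self, path, **kwargs):
        # Fetch the remote CSV and parse it with agate's CSV reader.
        text = requests.get(self.base_url + path).text
        return agate.Table.from_csv(io.StringIO(text), **kwargs)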
533160e303fd73a8e6f53ebc7e6430bba8888bbb
|
tests/test_player_draft.py
|
tests/test_player_draft.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from db.player_draft import PlayerDraft
from db.team import Team
def test_find_by_id():
pdft = PlayerDraft.find_by_player_id(8475883) # Frederik Andersen
assert len(pdft) == 2
pdft = PlayerDraft.find_by_player_id(8466145) # Nick Boynton
assert len(pdft) == 2
def test_find():
pdft = PlayerDraft.find(8479318, 10, 2016) # Auston Matthews
assert pdft.round == 1
assert pdft.overall == 1
def test_constructor():
pdft = PlayerDraft(8999444, 1, 2018, 3, 75) # fictional player
assert pdft.player_id == 8999444
assert Team.find_by_id(pdft.team_id).name == 'New Jersey Devils'
assert pdft.year == 2018
assert pdft.round == 3
assert pdft.overall == 75
def test_comparison_operators():
pdft_kopitar = PlayerDraft.find_by_player_id(8471685).pop(0) # 2005, 11
pdft_toews = PlayerDraft.find_by_player_id(8473604).pop(0) # 2006, 3
pdft_kessel = PlayerDraft.find_by_player_id(8473548).pop(0) # 2006, 5
pdft_stamkos = PlayerDraft.find_by_player_id(8474564).pop(0) # 2008, 1
ordered = sorted([pdft_kessel, pdft_kopitar, pdft_stamkos, pdft_toews])
assert ordered[0] == pdft_kopitar
assert ordered[1] == pdft_toews
assert ordered[2] == pdft_kessel
assert ordered[3] == pdft_stamkos
|
Add tests for player draft items
|
Add tests for player draft items
|
Python
|
mit
|
leaffan/pynhldb
|
Add tests for player draft items
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from db.player_draft import PlayerDraft
from db.team import Team
def test_find_by_id():
pdft = PlayerDraft.find_by_player_id(8475883) # Frederik Andersen
assert len(pdft) == 2
pdft = PlayerDraft.find_by_player_id(8466145) # Nick Boynton
assert len(pdft) == 2
def test_find():
pdft = PlayerDraft.find(8479318, 10, 2016) # Auston Matthews
assert pdft.round == 1
assert pdft.overall == 1
def test_constructor():
pdft = PlayerDraft(8999444, 1, 2018, 3, 75) # fictional player
assert pdft.player_id == 8999444
assert Team.find_by_id(pdft.team_id).name == 'New Jersey Devils'
assert pdft.year == 2018
assert pdft.round == 3
assert pdft.overall == 75
def test_comparison_operators():
pdft_kopitar = PlayerDraft.find_by_player_id(8471685).pop(0) # 2005, 11
pdft_toews = PlayerDraft.find_by_player_id(8473604).pop(0) # 2006, 3
pdft_kessel = PlayerDraft.find_by_player_id(8473548).pop(0) # 2006, 5
pdft_stamkos = PlayerDraft.find_by_player_id(8474564).pop(0) # 2008, 1
ordered = sorted([pdft_kessel, pdft_kopitar, pdft_stamkos, pdft_toews])
assert ordered[0] == pdft_kopitar
assert ordered[1] == pdft_toews
assert ordered[2] == pdft_kessel
assert ordered[3] == pdft_stamkos
|
<commit_before><commit_msg>Add tests for player draft items<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from db.player_draft import PlayerDraft
from db.team import Team
def test_find_by_id():
pdft = PlayerDraft.find_by_player_id(8475883) # Frederik Andersen
assert len(pdft) == 2
pdft = PlayerDraft.find_by_player_id(8466145) # Nick Boynton
assert len(pdft) == 2
def test_find():
pdft = PlayerDraft.find(8479318, 10, 2016) # Auston Matthews
assert pdft.round == 1
assert pdft.overall == 1
def test_constructor():
pdft = PlayerDraft(8999444, 1, 2018, 3, 75) # fictional player
assert pdft.player_id == 8999444
assert Team.find_by_id(pdft.team_id).name == 'New Jersey Devils'
assert pdft.year == 2018
assert pdft.round == 3
assert pdft.overall == 75
def test_comparison_operators():
pdft_kopitar = PlayerDraft.find_by_player_id(8471685).pop(0) # 2005, 11
pdft_toews = PlayerDraft.find_by_player_id(8473604).pop(0) # 2006, 3
pdft_kessel = PlayerDraft.find_by_player_id(8473548).pop(0) # 2006, 5
pdft_stamkos = PlayerDraft.find_by_player_id(8474564).pop(0) # 2008, 1
ordered = sorted([pdft_kessel, pdft_kopitar, pdft_stamkos, pdft_toews])
assert ordered[0] == pdft_kopitar
assert ordered[1] == pdft_toews
assert ordered[2] == pdft_kessel
assert ordered[3] == pdft_stamkos
|
Add tests for player draft items#!/usr/bin/env python
# -*- coding: utf-8 -*-
from db.player_draft import PlayerDraft
from db.team import Team
def test_find_by_id():
pdft = PlayerDraft.find_by_player_id(8475883) # Frederik Andersen
assert len(pdft) == 2
pdft = PlayerDraft.find_by_player_id(8466145) # Nick Boynton
assert len(pdft) == 2
def test_find():
pdft = PlayerDraft.find(8479318, 10, 2016) # Auston Matthews
assert pdft.round == 1
assert pdft.overall == 1
def test_constructor():
pdft = PlayerDraft(8999444, 1, 2018, 3, 75) # fictional player
assert pdft.player_id == 8999444
assert Team.find_by_id(pdft.team_id).name == 'New Jersey Devils'
assert pdft.year == 2018
assert pdft.round == 3
assert pdft.overall == 75
def test_comparison_operators():
pdft_kopitar = PlayerDraft.find_by_player_id(8471685).pop(0) # 2005, 11
pdft_toews = PlayerDraft.find_by_player_id(8473604).pop(0) # 2006, 3
pdft_kessel = PlayerDraft.find_by_player_id(8473548).pop(0) # 2006, 5
pdft_stamkos = PlayerDraft.find_by_player_id(8474564).pop(0) # 2008, 1
ordered = sorted([pdft_kessel, pdft_kopitar, pdft_stamkos, pdft_toews])
assert ordered[0] == pdft_kopitar
assert ordered[1] == pdft_toews
assert ordered[2] == pdft_kessel
assert ordered[3] == pdft_stamkos
|
<commit_before><commit_msg>Add tests for player draft items<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from db.player_draft import PlayerDraft
from db.team import Team
def test_find_by_id():
pdft = PlayerDraft.find_by_player_id(8475883) # Frederik Andersen
assert len(pdft) == 2
pdft = PlayerDraft.find_by_player_id(8466145) # Nick Boynton
assert len(pdft) == 2
def test_find():
pdft = PlayerDraft.find(8479318, 10, 2016) # Auston Matthews
assert pdft.round == 1
assert pdft.overall == 1
def test_constructor():
pdft = PlayerDraft(8999444, 1, 2018, 3, 75) # fictional player
assert pdft.player_id == 8999444
assert Team.find_by_id(pdft.team_id).name == 'New Jersey Devils'
assert pdft.year == 2018
assert pdft.round == 3
assert pdft.overall == 75
def test_comparison_operators():
pdft_kopitar = PlayerDraft.find_by_player_id(8471685).pop(0) # 2005, 11
pdft_toews = PlayerDraft.find_by_player_id(8473604).pop(0) # 2006, 3
pdft_kessel = PlayerDraft.find_by_player_id(8473548).pop(0) # 2006, 5
pdft_stamkos = PlayerDraft.find_by_player_id(8474564).pop(0) # 2008, 1
ordered = sorted([pdft_kessel, pdft_kopitar, pdft_stamkos, pdft_toews])
assert ordered[0] == pdft_kopitar
assert ordered[1] == pdft_toews
assert ordered[2] == pdft_kessel
assert ordered[3] == pdft_stamkos
|
|
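The sort in test_comparison_operators implies draft entries order by draft position over time, e.g. by a (year, round, overall) key. A sketch of comparison methods consistent with that ordering (an assumption; the real PlayerDraft model may compare differently):

class DraftOrderingSketch(object):
    def __init__(self, year, round, overall):
        self.year, self.round, self.overall = year, round, overall

    def _key(self):
        return (self.year, self.round, self.overall)

    def __eq__(self, other):
        return self._key() == other._key()

    def __lt__(self, other):
        # sorted() only needs __lt__; functools.total_ordering can
        # derive the remaining comparisons if required.
        return self._key() < other._key()

Any key that sorts Kopitar (2005) before Toews (2006, overall 3), Kessel (2006, overall 5) and Stamkos (2008, overall 1) satisfies the test.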
517c29e34a1bc43df60cbabad998a1c0581e7b21
|
pybossa/error/__init__.py
|
pybossa/error/__init__.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa error module for processing error status.
This package adds GET, POST, PUT and DELETE errors for the API:
* applications,
* tasks and
* task_runs
"""
import json
from flask import Response
class ErrorStatus(object):
"""
Class for formatting error status in JSON format.
This class has the following methods:
* format_exception: returns a Flask Response with the error.
"""
error_status = {"Forbidden": 403,
"NotFound": 404,
"Unauthorized": 401,
"TypeError": 415,
"ValueError": 415,
"DataError": 415,
"AttributeError": 415,
"IntegrityError": 415,
"TooManyRequests": 429}
def format_exception(self, e, target, action):
"""
Format the exception to a valid JSON object.
Returns a Flask Response with the error.
"""
exception_cls = e.__class__.__name__
if self.error_status.get(exception_cls):
status = self.error_status.get(exception_cls)
else:
status = 200
error = dict(action=action.upper(),
status="failed",
status_code=status,
target=target,
exception_cls=exception_cls,
exception_msg=e.message)
return Response(json.dumps(error), status=status,
mimetype='application/json')
|
Refactor HTTP Error status in a module
|
Refactor HTTP Error status in a module
|
Python
|
agpl-3.0
|
stefanhahmann/pybossa,jean/pybossa,geotagx/geotagx-pybossa-archive,harihpr/tweetclickers,OpenNewsLabs/pybossa,inteligencia-coletiva-lsd/pybossa,geotagx/geotagx-pybossa-archive,inteligencia-coletiva-lsd/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,harihpr/tweetclickers,geotagx/pybossa,stefanhahmann/pybossa,geotagx/geotagx-pybossa-archive,CulturePlex/pybossa,geotagx/geotagx-pybossa-archive,geotagx/geotagx-pybossa-archive,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,CulturePlex/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,proyectos-analizo-info/pybossa-analizo-info,CulturePlex/pybossa,jean/pybossa,PyBossa/pybossa
|
Refactor HTTP Error status in a module
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa error module for processing error status.
This package adds GET, POST, PUT and DELETE errors for the API:
* applications,
* tasks and
* task_runs
"""
import json
from flask import Response
class ErrorStatus(object):
"""
Class for formatting error status in JSON format.
This class has the following methods:
* format_exception: returns a Flask Response with the error.
"""
error_status = {"Forbidden": 403,
"NotFound": 404,
"Unauthorized": 401,
"TypeError": 415,
"ValueError": 415,
"DataError": 415,
"AttributeError": 415,
"IntegrityError": 415,
"TooManyRequests": 429}
def format_exception(self, e, target, action):
"""
Format the exception to a valid JSON object.
Returns a Flask Response with the error.
"""
exception_cls = e.__class__.__name__
if self.error_status.get(exception_cls):
status = self.error_status.get(exception_cls)
else:
status = 200
error = dict(action=action.upper(),
status="failed",
status_code=status,
target=target,
exception_cls=exception_cls,
exception_msg=e.message)
return Response(json.dumps(error), status=status,
mimetype='application/json')
|
<commit_before><commit_msg>Refactor HTTP Error status in a module<commit_after>
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa error module for processing error status.
This package adds GET, POST, PUT and DELETE errors for the API:
* applications,
* tasks and
* task_runs
"""
import json
from flask import Response
class ErrorStatus(object):
"""
Class for formatting error status in JSON format.
This class has the following methods:
* format_exception: returns a Flask Response with the error.
"""
error_status = {"Forbidden": 403,
"NotFound": 404,
"Unauthorized": 401,
"TypeError": 415,
"ValueError": 415,
"DataError": 415,
"AttributeError": 415,
"IntegrityError": 415,
"TooManyRequests": 429}
def format_exception(self, e, target, action):
"""
Format the exception to a valid JSON object.
Returns a Flask Response with the error.
"""
exception_cls = e.__class__.__name__
if self.error_status.get(exception_cls):
status = self.error_status.get(exception_cls)
else:
status = 200
error = dict(action=action.upper(),
status="failed",
status_code=status,
target=target,
exception_cls=exception_cls,
exception_msg=e.message)
return Response(json.dumps(error), status=status,
mimetype='application/json')
|
Refactor HTTP Error status in a module# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa error module for processing error status.
This package adds GET, POST, PUT and DELETE errors for the API:
* applications,
* tasks and
* task_runs
"""
import json
from flask import Response
class ErrorStatus(object):
"""
Class for formatting error status in JSON format.
This class has the following methods:
* format_exception: returns a Flask Response with the error.
"""
error_status = {"Forbidden": 403,
"NotFound": 404,
"Unauthorized": 401,
"TypeError": 415,
"ValueError": 415,
"DataError": 415,
"AttributeError": 415,
"IntegrityError": 415,
"TooManyRequests": 429}
def format_exception(self, e, target, action):
"""
Format the exception to a valid JSON object.
Returns a Flask Response with the error.
"""
exception_cls = e.__class__.__name__
if self.error_status.get(exception_cls):
status = self.error_status.get(exception_cls)
else:
status = 200
error = dict(action=action.upper(),
status="failed",
status_code=status,
target=target,
exception_cls=exception_cls,
exception_msg=e.message)
return Response(json.dumps(error), status=status,
mimetype='application/json')
|
<commit_before><commit_msg>Refactor HTTP Error status in a module<commit_after># -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa error module for processing error status.
This package adds GET, POST, PUT and DELETE errors for the API:
* applications,
* tasks and
* task_runs
"""
import json
from flask import Response
class ErrorStatus(object):
"""
Class for formatting error status in JSON format.
This class has the following methods:
* format_exception: returns a Flask Response with the error.
"""
error_status = {"Forbidden": 403,
"NotFound": 404,
"Unauthorized": 401,
"TypeError": 415,
"ValueError": 415,
"DataError": 415,
"AttributeError": 415,
"IntegrityError": 415,
"TooManyRequests": 429}
def format_exception(self, e, target, action):
"""
Format the exception to a valid JSON object.
Returns a Flask Response with the error.
"""
exception_cls = e.__class__.__name__
if self.error_status.get(exception_cls):
status = self.error_status.get(exception_cls)
else:
status = 200
error = dict(action=action.upper(),
status="failed",
status_code=status,
target=target,
exception_cls=exception_cls,
exception_msg=e.message)
return Response(json.dumps(error), status=status,
mimetype='application/json')
|
|
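Typical use is to wrap an API view body so that any raised exception is mapped to the right HTTP status. A sketch (save_task is a hypothetical helper; assumes the usual Flask imports and an existing app):

error_handler = ErrorStatus()

@app.route('/api/task', methods=['POST'])
def post_task():
    try:
        task = save_task(request.json)  # hypothetical helper
        return Response(json.dumps(task), mimetype='application/json')
    except Exception as e:
        # Exception classes missing from error_status fall through to 200.
        return error_handler.format_exception(e, target='task', action='post')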
17c69212ff5ea1a5991dc89ba1ca7365c8d666c2
|
firmware/common/binsize.py
|
firmware/common/binsize.py
|
#!/usr/bin/env python
from subprocess import Popen, PIPE
class app(object):
def __init__(self):
pass
motolink = app()
motolink.name = "ch"
motolink.path = "build/ch.elf"
motolink.max_ccm = 4*1024
motolink.max_ram = 12*1024
motolink.max_rom = 64*1024
APPS = [motolink]
for app in APPS:
ccm = 0
ram = 0
rom = 0
p = Popen(["arm-none-eabi-size", "-A", app.path], stdout=PIPE)
if p.wait() == 0:
output = p.stdout.read()
lines = filter(None, output.split("\n"))
for line in lines:
columns = filter(None, line.split(" "))
if ".stacks" in columns[0]:
ram += int(columns[1])
elif ".ram4" in columns[0]:
ccm += int(columns[1])
rom += int(columns[1])
elif ".bss" in columns[0]:
ram += int(columns[1])
elif ".data" in columns[0]:
ram += int(columns[1])
rom += int(columns[1])
elif ".text" in columns[0]:
rom += int(columns[1])
elif ".startup" in columns[0]:
rom += int(columns[1])
print ""
print app.name
print "CCM used: {}% - {:4.1f}/{}k".format((ccm*100)/app.max_ccm,
ccm/1024.0,
app.max_ccm/1024.0)
print "RAM used: {}% - {:4.1f}/{}k".format((ram*100)/app.max_ram,
ram/1024.0,
app.max_ram/1024.0)
print "ROM used: {}% - {:4.1f}/{}k".format((rom*100)/app.max_rom,
rom/1024.0,
app.max_rom/1024.0)
|
Add a Python script to get ROM and RAM usage.
|
Add a Python script to get ROM and RAM usage.
|
Python
|
apache-2.0
|
romainreignier/robot2017,romainreignier/robot2017,romainreignier/robot2017,romainreignier/robot2017
|
Add a Python script to get ROM and RAM usage.
|
#!/usr/bin/env python
from subprocess import Popen, PIPE
class app(object):
def __init__(self):
pass
motolink = app()
motolink.name = "ch"
motolink.path = "build/ch.elf"
motolink.max_ccm = 4*1024
motolink.max_ram = 12*1024
motolink.max_rom = 64*1024
APPS = [motolink]
for app in APPS:
ccm = 0
ram = 0
rom = 0
p = Popen(["arm-none-eabi-size", "-A", app.path], stdout=PIPE)
if p.wait() == 0:
output = p.stdout.read()
lines = filter(None, output.split("\n"))
for line in lines:
columns = filter(None, line.split(" "))
if ".stacks" in columns[0]:
ram += int(columns[1])
elif ".ram4" in columns[0]:
ccm += int(columns[1])
rom += int(columns[1])
elif ".bss" in columns[0]:
ram += int(columns[1])
elif ".data" in columns[0]:
ram += int(columns[1])
rom += int(columns[1])
elif ".text" in columns[0]:
rom += int(columns[1])
elif ".startup" in columns[0]:
rom += int(columns[1])
print ""
print app.name
print "CCM used: {}% - {:4.1f}/{}k".format((ccm*100)/app.max_ccm,
ccm/1024.0,
app.max_ccm/1024.0)
print "RAM used: {}% - {:4.1f}/{}k".format((ram*100)/app.max_ram,
ram/1024.0,
app.max_ram/1024.0)
print "ROM used: {}% - {:4.1f}/{}k".format((rom*100)/app.max_rom,
rom/1024.0,
app.max_rom/1024.0)
|
<commit_before><commit_msg>Add a Python script to get ROM and RAM usage.<commit_after>
|
#!/usr/bin/env python
from subprocess import Popen, PIPE
class app(object):
def __init__(self):
pass
motolink = app()
motolink.name = "ch"
motolink.path = "build/ch.elf"
motolink.max_ccm = 4*1024
motolink.max_ram = 12*1024
motolink.max_rom = 64*1024
APPS = [motolink]
for app in APPS:
ccm = 0
ram = 0
rom = 0
p = Popen(["arm-none-eabi-size", "-A", app.path], stdout=PIPE)
if p.wait() == 0:
output = p.stdout.read()
lines = filter(None, output.split("\n"))
for line in lines:
columns = filter(None, line.split(" "))
if ".stacks" in columns[0]:
ram += int(columns[1])
elif ".ram4" in columns[0]:
ccm += int(columns[1])
rom += int(columns[1])
elif ".bss" in columns[0]:
ram += int(columns[1])
elif ".data" in columns[0]:
ram += int(columns[1])
rom += int(columns[1])
elif ".text" in columns[0]:
rom += int(columns[1])
elif ".startup" in columns[0]:
rom += int(columns[1])
print ""
print app.name
print "CCM used: {}% - {:4.1f}/{}k".format((ccm*100)/app.max_ccm,
ccm/1024.0,
app.max_ccm/1024.0)
print "RAM used: {}% - {:4.1f}/{}k".format((ram*100)/app.max_ram,
ram/1024.0,
app.max_ram/1024.0)
print "ROM used: {}% - {:4.1f}/{}k".format((rom*100)/app.max_rom,
rom/1024.0,
app.max_rom/1024.0)
|
Add a Python script to get ROM and RAM usage.#!/usr/bin/env python
from subprocess import Popen, PIPE
class app(object):
def __init__(self):
pass
motolink = app()
motolink.name = "ch"
motolink.path = "build/ch.elf"
motolink.max_ccm = 4*1024
motolink.max_ram = 12*1024
motolink.max_rom = 64*1024
APPS = [motolink]
for app in APPS:
ccm = 0
ram = 0
rom = 0
p = Popen(["arm-none-eabi-size", "-A", app.path], stdout=PIPE)
if p.wait() == 0:
output = p.stdout.read()
lines = filter(None, output.split("\n"))
for line in lines:
columns = filter(None, line.split(" "))
if ".stacks" in columns[0]:
ram += int(columns[1])
elif ".ram4" in columns[0]:
ccm += int(columns[1])
rom += int(columns[1])
elif ".bss" in columns[0]:
ram += int(columns[1])
elif ".data" in columns[0]:
ram += int(columns[1])
rom += int(columns[1])
elif ".text" in columns[0]:
rom += int(columns[1])
elif ".startup" in columns[0]:
rom += int(columns[1])
print ""
print app.name
print "CCM used: {}% - {:4.1f}/{}k".format((ccm*100)/app.max_ccm,
ccm/1024.0,
app.max_ccm/1024.0)
print "RAM used: {}% - {:4.1f}/{}k".format((ram*100)/app.max_ram,
ram/1024.0,
app.max_ram/1024.0)
print "ROM used: {}% - {:4.1f}/{}k".format((rom*100)/app.max_rom,
rom/1024.0,
app.max_rom/1024.0)
|
<commit_before><commit_msg>Add a Python script to get ROM and RAM usage.<commit_after>#!/usr/bin/env python
from subprocess import Popen, PIPE
class app(object):
def __init__(self):
pass
motolink = app()
motolink.name = "ch"
motolink.path = "build/ch.elf"
motolink.max_ccm = 4*1024
motolink.max_ram = 12*1024
motolink.max_rom = 64*1024
APPS = [motolink]
for app in APPS:
ccm = 0
ram = 0
rom = 0
p = Popen(["arm-none-eabi-size", "-A", app.path], stdout=PIPE)
if p.wait() == 0:
output = p.stdout.read()
lines = filter(None, output.split("\n"))
for line in lines:
columns = filter(None, line.split(" "))
if ".stacks" in columns[0]:
ram += int(columns[1])
elif ".ram4" in columns[0]:
ccm += int(columns[1])
rom += int(columns[1])
elif ".bss" in columns[0]:
ram += int(columns[1])
elif ".data" in columns[0]:
ram += int(columns[1])
rom += int(columns[1])
elif ".text" in columns[0]:
rom += int(columns[1])
elif ".startup" in columns[0]:
rom += int(columns[1])
print ""
print app.name
print "CCM used: {}% - {:4.1f}/{}k".format((ccm*100)/app.max_ccm,
ccm/1024.0,
app.max_ccm/1024.0)
print "RAM used: {}% - {:4.1f}/{}k".format((ram*100)/app.max_ram,
ram/1024.0,
app.max_ram/1024.0)
print "ROM used: {}% - {:4.1f}/{}k".format((rom*100)/app.max_rom,
rom/1024.0,
app.max_rom/1024.0)
|
|
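The column handling assumes GNU size's sysv (-A) layout, where each section line carries name, size and address. A quick sanity check of the same parsing against a fabricated line (real output also includes headers and a total row):

# Fabricated `arm-none-eabi-size -A` section line (format is an assumption).
line = ".text          51344     134217728"
columns = filter(None, line.split(" "))
assert columns[0] == ".text"
assert int(columns[1]) == 51344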
4f189c20fc535f780e905e6fee7351329240d8fc
|
tyr/clusters/mongo.py
|
tyr/clusters/mongo.py
|
import logging
class MongoCluster(object):
log = logging.getLogger('Clusters.Mongo')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s [%(name)s] %(levelname)s: %(message)s',
datefmt = '%H:%M:%S')
ch.setFormatter(formatter)
log.addHandler(ch)
|
Set up logging for a MongoDB cluster
|
Set up logging for a MongoDB cluster
|
Python
|
unlicense
|
hudl/Tyr
|
Set up logging for a MongoDB cluster
|
import logging
class MongoCluster(object):
log = logging.getLogger('Clusters.Mongo')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s [%(name)s] %(levelname)s: %(message)s',
datefmt = '%H:%M:%S')
ch.setFormatter(formatter)
log.addHandler(ch)
|
<commit_before><commit_msg>Set up logging for a MongoDB cluster<commit_after>
|
import logging
class MongoCluster(object):
log = logging.getLogger('Clusters.Mongo')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s [%(name)s] %(levelname)s: %(message)s',
datefmt = '%H:%M:%S')
ch.setFormatter(formatter)
log.addHandler(ch)
|
Set up logging for a MongoDB clusterimport logging
class MongoCluster(object):
log = logging.getLogger('Clusters.Mongo')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s [%(name)s] %(levelname)s: %(message)s',
datefmt = '%H:%M:%S')
ch.setFormatter(formatter)
log.addHandler(ch)
|
<commit_before><commit_msg>Set up logging for a MongoDB cluster<commit_after>import logging
class MongoCluster(object):
log = logging.getLogger('Clusters.Mongo')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s [%(name)s] %(levelname)s: %(message)s',
datefmt = '%H:%M:%S')
ch.setFormatter(formatter)
log.addHandler(ch)
|
|
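With the handler attached at class definition time, any code holding the class (or an instance) can log through the shared logger, for example:

MongoCluster.log.info('provisioning replica set')
# emits something like: 12:34:56 [Clusters.Mongo] INFO: provisioning replica set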
feafbefbc7de1f71cfa50af51139c4a09a306012
|
socketio-client.py
|
socketio-client.py
|
import websocket, httplib, sys, asyncore
'''
connect to the socketio server
1. perform the HTTP handshake
2. open a websocket connection '''
def connect(server, port):
print("connecting to: %s:%d" %(server, port))
conn = httplib.HTTPConnection(server + ":" + str(port))
conn.request('POST','/socket.io/1/')
resp = conn.getresponse()
hskey = resp.read().split(':')[0]
ws = websocket.WebSocket(
'ws://'+server+':'+str(port)+'/socket.io/1/websocket/'+hskey,
onopen = _onopen,
onmessage = _onmessage,
onclose = _onclose)
return ws
def _onopen():
print("opened!")
def _onmessage(msg):
print("msg: " + str(msg))
def _onclose():
print("closed!")
if __name__ == '__main__':
if len(sys.argv) != 3:
sys.stderr.write('usage: python client.py <server> <port>\n')
sys.exit(1)
server = sys.argv[1]
port = int(sys.argv[2])
ws = connect(server, port)
try:
asyncore.loop()
except KeyboardInterrupt:
ws.close()
|
Add socket io client example
|
Add socket io client example
|
Python
|
mit
|
voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts
|
Add socket io client example
|
import websocket, httplib, sys, asyncore
'''
connect to the socketio server
1. perform the HTTP handshake
2. open a websocket connection '''
def connect(server, port):
print("connecting to: %s:%d" %(server, port))
conn = httplib.HTTPConnection(server + ":" + str(port))
conn.request('POST','/socket.io/1/')
resp = conn.getresponse()
hskey = resp.read().split(':')[0]
ws = websocket.WebSocket(
'ws://'+server+':'+str(port)+'/socket.io/1/websocket/'+hskey,
onopen = _onopen,
onmessage = _onmessage,
onclose = _onclose)
return ws
def _onopen():
print("opened!")
def _onmessage(msg):
print("msg: " + str(msg))
def _onclose():
print("closed!")
if __name__ == '__main__':
if len(sys.argv) != 3:
sys.stderr.write('usage: python client.py <server> <port>\n')
sys.exit(1)
server = sys.argv[1]
port = int(sys.argv[2])
ws = connect(server, port)
try:
asyncore.loop()
except KeyboardInterrupt:
ws.close()
|
<commit_before><commit_msg>Add socket io client example<commit_after>
|
import websocket, httplib, sys, asyncore
'''
connect to the socketio server
1. perform the HTTP handshake
2. open a websocket connection '''
def connect(server, port):
print("connecting to: %s:%d" %(server, port))
conn = httplib.HTTPConnection(server + ":" + str(port))
conn.request('POST','/socket.io/1/')
resp = conn.getresponse()
hskey = resp.read().split(':')[0]
ws = websocket.WebSocket(
'ws://'+server+':'+str(port)+'/socket.io/1/websocket/'+hskey,
onopen = _onopen,
onmessage = _onmessage,
onclose = _onclose)
return ws
def _onopen():
print("opened!")
def _onmessage(msg):
print("msg: " + str(msg))
def _onclose():
print("closed!")
if __name__ == '__main__':
if len(sys.argv) != 3:
sys.stderr.write('usage: python client.py <server> <port>\n')
sys.exit(1)
server = sys.argv[1]
port = int(sys.argv[2])
ws = connect(server, port)
try:
asyncore.loop()
except KeyboardInterrupt:
ws.close()
|
Add socket io client exampleimport websocket, httplib, sys, asyncore
'''
connect to the socketio server
1. perform the HTTP handshake
2. open a websocket connection '''
def connect(server, port):
print("connecting to: %s:%d" %(server, port))
conn = httplib.HTTPConnection(server + ":" + str(port))
conn.request('POST','/socket.io/1/')
resp = conn.getresponse()
hskey = resp.read().split(':')[0]
ws = websocket.WebSocket(
'ws://'+server+':'+str(port)+'/socket.io/1/websocket/'+hskey,
onopen = _onopen,
onmessage = _onmessage,
onclose = _onclose)
return ws
def _onopen():
print("opened!")
def _onmessage(msg):
print("msg: " + str(msg))
def _onclose():
print("closed!")
if __name__ == '__main__':
if len(sys.argv) != 3:
sys.stderr.write('usage: python client.py <server> <port>\n')
sys.exit(1)
server = sys.argv[1]
port = int(sys.argv[2])
ws = connect(server, port)
try:
asyncore.loop()
except KeyboardInterrupt:
ws.close()
|
<commit_before><commit_msg>Add socket io client example<commit_after>import websocket, httplib, sys, asyncore
'''
connect to the socketio server
1. perform the HTTP handshake
2. open a websocket connection '''
def connect(server, port):
print("connecting to: %s:%d" %(server, port))
conn = httplib.HTTPConnection(server + ":" + str(port))
conn.request('POST','/socket.io/1/')
resp = conn.getresponse()
hskey = resp.read().split(':')[0]
ws = websocket.WebSocket(
'ws://'+server+':'+str(port)+'/socket.io/1/websocket/'+hskey,
onopen = _onopen,
onmessage = _onmessage,
onclose = _onclose)
return ws
def _onopen():
print("opened!")
def _onmessage(msg):
print("msg: " + str(msg))
def _onclose():
print("closed!")
if __name__ == '__main__':
if len(sys.argv) != 3:
sys.stderr.write('usage: python client.py <server> <port>\n')
sys.exit(1)
server = sys.argv[1]
port = int(sys.argv[2])
ws = connect(server, port)
try:
asyncore.loop()
except KeyboardInterrupt:
ws.close()
|
|
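The handshake parsing relies on the socket.io 0.9-era response body, which is colon-delimited roughly as sid:heartbeat timeout:close timeout:transports, so the first field is the session key. Illustrative, with fabricated values:

body = '4d4f1a2b3c:60:60:websocket,xhr-polling'
hskey = body.split(':')[0]
assert hskey == '4d4f1a2b3c'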
c56efa68fbc878d307391910de9c521e5f2a6673
|
python/dynamic_class.py
|
python/dynamic_class.py
|
# -*- coding:utf-8 -*-
class CanDoSomething(object):
message = "You don't have permission"
def has_permission(self, request, view):
return True
class CanDoSomethingOrReadOnly(CanDoSomething):
def has_permission(self, request, view):
return False
class HasPermissionToDo(object):
p_class = CanDoSomething
name = "HasPermissionToDo%s"
def __init__(self, permission_code, *args, **kwargs):
pass
def __new__(cls, permission_code, *args, **kwargs):
code = permission_code.split('.')[1]
name = ''.join(x.title() for x in code.split('_'))
cls.p_class.message= "hello world"
cls.p_class.__name__ = cls.name % name
return cls.p_class
class HasPermissionToDoOrReadOnly(HasPermissionToDo):
p_class = CanDoSomethingOrReadOnly
name = "HasPermissionToDo%sOrReadOnly"
|
Add python dynamic class sample
|
Add python dynamic class sample
|
Python
|
mit
|
aiden0z/snippets,aiden0z/snippets,aiden0z/snippets,aiden0z/snippets,aiden0z/snippets,aiden0z/snippets
|
Add python dynamic class sample
|
# -*- coding:utf-8 -*-
class CanDoSomething(object):
message = "You don't have permission"
def has_permission(self, request, view):
return True
class CanDoSomethingOrReadOnly(CanDoSomething):
def has_permission(self, request, view):
return False
class HasPermissionToDo(object):
p_class = CanDoSomething
name = "HasPermissionToDo%s"
def __init__(self, permission_code, *args, **kwargs):
pass
def __new__(cls, permission_code, *args, **kwargs):
code = permission_code.split('.')[1]
name = ''.join(x.title() for x in code.split('_'))
cls.p_class.message= "hello world"
cls.p_class.__name__ = cls.name % name
return cls.p_class
class HasPermissionToDoOrReadOnly(HasPermissionToDo):
p_class = CanDoSomethingOrReadOnly
name = "HasPermissionToDo%sOrReadOnly"
|
<commit_before><commit_msg>Add python dynamic class sample<commit_after>
|
# -*- coding:utf-8 -*-
class CanDoSomething(object):
message = "You don't have permission"
def has_permission(self, request, view):
return True
class CanDoSomethingOrReadOnly(CanDoSomething):
def has_permission(self, request, view):
return False
class HasPermissionToDo(object):
p_class = CanDoSomething
name = "HasPermissionToDo%s"
def __init__(self, permission_code, *args, **kwargs):
pass
def __new__(cls, permission_code, *args, **kwargs):
code = permission_code.split('.')[1]
name = ''.join(x.title() for x in code.split('_'))
cls.p_class.message= "hello world"
cls.p_class.__name__ = cls.name % name
return cls.p_class
class HasPermissionToDoOrReadOnly(HasPermissionToDo):
p_class = CanDoSomethingOrReadOnly
name = "HasPermissionToDo%sOrReadOnly"
|
Add python dynamic class sample# -*- coding:utf-8 -*-
class CanDoSomething(object):
message = "You don't have permission"
def has_permission(self, request, view):
return True
class CanDoSomethingOrReadOnly(CanDoSomething):
def has_permission(self, request, view):
return False
class HasPermissionToDo(object):
p_class = CanDoSomething
name = "HasPermissionToDo%s"
def __init__(self, permission_code, *args, **kwargs):
pass
def __new__(cls, permission_code, *args, **kwargs):
code = permission_code.split('.')[1]
name = ''.join(x.title() for x in code.split('_'))
cls.p_class.message= "hello world"
cls.p_class.__name__ = cls.name % name
return cls.p_class
class HasPermissionToDoOrReadOnly(HasPermissionToDo):
p_class = CanDoSomethingOrReadOnly
name = "HasPermissionToDo%sOrReadOnly"
|
<commit_before><commit_msg>Add python dynamic class sample<commit_after># -*- coding:utf-8 -*-
class CanDoSomething(object):
message = "You don't have permission"
def has_permission(self, request, view):
return True
class CanDoSomethingOrReadOnly(CanDoSomething):
def has_permission(self, request, view):
return False
class HasPermissionToDo(object):
p_class = CanDoSomething
name = "HasPermissionToDo%s"
def __init__(self, permission_code, *args, **kwargs):
pass
def __new__(cls, permission_code, *args, **kwargs):
code = permission_code.split('.')[1]
name = ''.join(x.title() for x in code.split('_'))
cls.p_class.message= "hello world"
cls.p_class.__name__ = cls.name % name
return cls.p_class
class HasPermissionToDoOrReadOnly(HasPermissionToDo):
p_class = CanDoSomethingOrReadOnly
name = "HasPermissionToDo%sOrReadOnly"
|
|
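Illustrative use (the permission code is made up): because __new__ returns cls.p_class rather than an instance, "calling" these classes yields a renamed permission class, and __init__ is never invoked.

perm_cls = HasPermissionToDo('myapp.manage_users')
print(perm_cls.__name__)    # HasPermissionToDoManageUsers
print(perm_cls.message)     # hello world
ro_cls = HasPermissionToDoOrReadOnly('myapp.manage_users')
print(ro_cls.__name__)      # HasPermissionToDoManageUsersOrReadOnly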
1dd8120dab6cdec4097bea1193a5b5b68d3bfe4f
|
salt/modules/win_groupadd.py
|
salt/modules/win_groupadd.py
|
'''
Manage groups on Windows
'''
def __virtual__():
'''
Set the group module if the kernel is Windows
'''
return 'group' if __grains__['kernel'] == 'Windows' else False
def add(name):
'''
Add the specified group
CLI Example::
salt '*' group.add foo
'''
cmd = 'net localgroup {0} /add'.format(name)
ret = __salt__['cmd.run_all'](cmd)
return not ret['retcode']
def delete(name):
'''
Remove the named group
CLI Example::
salt '*' group.delete foo
'''
ret = __salt__['cmd.run_all']('net localgroup {0} /delete'.format(name))
return not ret['retcode']
def info(name):
'''
Return information about a group
CLI Example::
salt '*' group.info foo
'''
lines = __salt__['cmd.run']('net localgroup {0}'.format(name)).split('\n')
memberline = False
gr_mem = []
gr_name = ''
for line in lines:
if 'Alias name' in line:
comps = line.split(' ', 1)
gr_name = comps[1].strip()
if 'successfully' in line:
memberline = False
if memberline:
gr_mem.append(line.strip())
if '---' in line:
memberline = True
if not gr_name:
return False
return {'name': gr_name,
'passwd': None,
'gid': None,
'members': gr_mem}
def getent():
'''
Return info on all groups
CLI Example::
salt '*' group.getent
'''
ret = []
lines = __salt__['cmd.run']('net localgroup').split('\n')
groupline = False
for line in lines:
if 'successfully' in line:
groupline = False
if groupline:
ret.append(line.strip('*').strip())
if '---' in line:
groupline = True
return ret
|
Add Windows support to group add Add, remove and get info on Local Windows Groups
|
Add Windows support to group add
Add, remove and get info on Local Windows Groups
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add Windows support to group add
Add, remove and get info on Local Windows Groups
|
'''
Manage groups on Windows
'''
def __virtual__():
'''
Set the group module if the kernel is Windows
'''
return 'group' if __grains__['kernel'] == 'Windows' else False
def add(name):
'''
Add the specified group
CLI Example::
salt '*' group.add foo
'''
cmd = 'net localgroup {0} /add'.format(name)
ret = __salt__['cmd.run_all'](cmd)
return not ret['retcode']
def delete(name):
'''
Remove the named group
CLI Example::
salt '*' group.delete foo
'''
ret = __salt__['cmd.run_all']('net localgroup {0} /delete'.format(name))
return not ret['retcode']
def info(name):
'''
Return information about a group
CLI Example::
salt '*' group.info foo
'''
lines = __salt__['cmd.run']('net localgroup {0}'.format(name)).split('\n')
memberline = False
gr_mem = []
gr_name = ''
for line in lines:
if 'Alias name' in line:
comps = line.split(' ', 1)
gr_name = comps[1].strip()
if 'successfully' in line:
memberline = False
if memberline:
gr_mem.append(line.strip())
if '---' in line:
memberline = True
if not gr_name:
return False
return {'name': gr_name,
'passwd': None,
'gid': None,
'members': gr_mem}
def getent():
'''
Return info on all groups
CLI Example::
salt '*' group.getent
'''
ret = []
lines = __salt__['cmd.run']('net localgroup').split('\n')
groupline = False
for line in lines:
if 'successfully' in line:
groupline = False
if groupline:
ret.append(line.strip('*').strip())
if '---' in line:
groupline = True
return ret
|
<commit_before><commit_msg>Add Windows support to group add
Add, remove and get info on Local Windows Groups<commit_after>
|
'''
Manage groups on Windows
'''
def __virtual__():
'''
Set the group module if the kernel is Windows
'''
return 'group' if __grains__['kernel'] == 'Windows' else False
def add(name):
'''
Add the specified group
CLI Example::
salt '*' group.add foo
'''
cmd = 'net localgroup {0} /add'.format(name)
ret = __salt__['cmd.run_all'](cmd)
return not ret['retcode']
def delete(name):
'''
Remove the named group
CLI Example::
salt '*' group.delete foo
'''
ret = __salt__['cmd.run_all']('net localgroup {0} /delete'.format(name))
return not ret['retcode']
def info(name):
'''
Return information about a group
CLI Example::
salt '*' group.info foo
'''
lines = __salt__['cmd.run']('net localgroup {0}'.format(name)).split('\n')
memberline = False
gr_mem = []
gr_name = ''
for line in lines:
if 'Alias name' in line:
comps = line.split(' ', 1)
gr_name = comps[1].strip()
if 'successfully' in line:
memberline = False
if memberline:
gr_mem.append(line.strip())
if '---' in line:
memberline = True
if not gr_name:
return False
return {'name': gr_name,
'passwd': None,
'gid': None,
'members': gr_mem}
def getent():
'''
Return info on all groups
CLI Example::
salt '*' group.getent
'''
ret = []
lines = __salt__['cmd.run']('net localgroup').split('\n')
groupline = False
for line in lines:
if 'successfully' in line:
groupline = False
if groupline:
ret.append(line.strip('*').strip())
if '---' in line:
groupline = True
return ret
|
Add Windows support to group add
Add, remove and get info on Local Windows Groups'''
Manage groups on Windows
'''
def __virtual__():
'''
Set the group module if the kernel is Windows
'''
return 'group' if __grains__['kernel'] == 'Windows' else False
def add(name):
'''
Add the specified group
CLI Example::
salt '*' group.add foo
'''
cmd = 'net localgroup {0} /add'.format(name)
ret = __salt__['cmd.run_all'](cmd)
return not ret['retcode']
def delete(name):
'''
Remove the named group
CLI Example::
salt '*' group.delete foo
'''
ret = __salt__['cmd.run_all']('net localgroup {0} /delete'.format(name))
return not ret['retcode']
def info(name):
'''
Return information about a group
CLI Example::
salt '*' group.info foo
'''
lines = __salt__['cmd.run']('net localgroup {0}'.format(name)).split('\n')
memberline = False
gr_mem = []
gr_name = ''
for line in lines:
if 'Alias name' in line:
comps = line.split(' ', 1)
gr_name = comps[1].strip()
if 'successfully' in line:
memberline = False
if memberline:
gr_mem.append(line.strip())
if '---' in line:
memberline = True
if not gr_name:
return False
return {'name': gr_name,
'passwd': None,
'gid': None,
'members': gr_mem}
def getent():
'''
Return info on all groups
CLI Example::
salt '*' group.getent
'''
ret = []
lines = __salt__['cmd.run']('net localgroup').split('\n')
groupline = False
for line in lines:
if 'successfully' in line:
groupline = False
if groupline:
ret.append(line.strip('*').strip())
if '---' in line:
groupline = True
return ret
|
<commit_before><commit_msg>Add Windows support to group add
Add, remove and get info on Local Windows Groups<commit_after>'''
Manage groups on Windows
'''
def __virtual__():
'''
Set the group module if the kernel is Windows
'''
return 'group' if __grains__['kernel'] == 'Windows' else False
def add(name):
'''
Add the specified group
CLI Example::
salt '*' group.add foo
'''
cmd = 'net localgroup {0} /add'.format(name)
ret = __salt__['cmd.run_all'](cmd)
return not ret['retcode']
def delete(name):
'''
Remove the named group
CLI Example::
salt '*' group.delete foo
'''
ret = __salt__['cmd.run_all']('net localgroup {0} /delete'.format(name))
return not ret['retcode']
def info(name):
'''
Return information about a group
CLI Example::
salt '*' group.info foo
'''
lines = __salt__['cmd.run']('net localgroup {0}'.format(name)).split('\n')
memberline = False
gr_mem = []
gr_name = ''
for line in lines:
if 'Alias name' in line:
comps = line.split(' ', 1)
gr_name = comps[1].strip()
if 'successfully' in line:
memberline = False
if memberline:
gr_mem.append(line.strip())
if '---' in line:
memberline = True
if not gr_name:
return False
return {'name': gr_name,
'passwd': None,
'gid': None,
'members': gr_mem}
def getent():
'''
Return info on all groups
CLI Example::
salt '*' group.getent
'''
ret = []
lines = __salt__['cmd.run']('net localgroup').split('\n')
groupline = False
for line in lines:
if 'successfully' in line:
groupline = False
if groupline:
ret.append(line.strip('*').strip())
if '---' in line:
groupline = True
return ret
|
|
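The member parsing in info() is a small state machine: collection switches on at the '---' rule and off again at the 'successfully' footer. Against fabricated output it behaves like this (real net localgroup output varies by Windows version):

lines = ['---------------------', 'Administrator', 'fred',
         'The command completed successfully.']
memberline, gr_mem = False, []
for line in lines:
    if 'successfully' in line:
        memberline = False
    if memberline:
        gr_mem.append(line.strip())
    if '---' in line:
        memberline = True
assert gr_mem == ['Administrator', 'fred']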
a04dddb09c276300c9a11dcbbe331ce2a32cff29
|
convert.py
|
convert.py
|
from AppKit import NSApplication, NSImage, NSImageCurrentFrame, NSGIFFileType; import sys, os
dirname = sys.argv[1]
files = os.listdir(dirname)
for f in files:
if '.gif' not in f: continue
fName = os.path.join(dirname, f)
tName=os.path.basename(fName)
dir='/tmp/frames/'
os.system('rm -rf %s && mkdir -p %s' % (dir,dir))
app=NSApplication.sharedApplication()
img=NSImage.alloc().initWithContentsOfFile_(fName)
if img:
gifRep=img.representations()[0]
frames=gifRep.valueForProperty_('NSImageFrameCount')
if frames:
for i in range(frames.intValue()):
gifRep.setProperty_withValue_(NSImageCurrentFrame, i)
gifRep.representationUsingType_properties_(NSGIFFileType, None).writeToFile_atomically_(dir + 'tmp' + str(i + 1).zfill(2) + '.gif', True)
for i in range(9):
os.system('mv %stmp0%d.gif %stmp%d.gif' % (dir, i + 1, dir, i+1))
v = "montage "
for i in range(frames.intValue()):
v += '/tmp/frames/tmp%d.gif ' % (i + 1)
v += " -tile x1 -geometry +0+0 -alpha On -background \"rgba(0, 0, 0, 0.0)\" -quality 100 %s" % (fName.replace('gif', 'png'),)
os.system(v)
|
Convert script for .gif -> spritesheet.
|
Convert script for .gif -> spritesheet.
|
Python
|
mit
|
Mytherin/PokemonAI,Mytherin/PokemonAI,Mytherin/PokemonAI
|
Convert script for .gif -> spritesheet.
|
from AppKit import NSApplication, NSImage, NSImageCurrentFrame, NSGIFFileType; import sys, os
dirname = sys.argv[1]
files = os.listdir(dirname)
for f in files:
if '.gif' not in f: continue
fName = os.path.join(dirname, f)
tName=os.path.basename(fName)
dir='/tmp/frames/'
os.system('rm -rf %s && mkdir -p %s' % (dir,dir))
app=NSApplication.sharedApplication()
img=NSImage.alloc().initWithContentsOfFile_(fName)
if img:
gifRep=img.representations()[0]
frames=gifRep.valueForProperty_('NSImageFrameCount')
if frames:
for i in range(frames.intValue()):
gifRep.setProperty_withValue_(NSImageCurrentFrame, i)
gifRep.representationUsingType_properties_(NSGIFFileType, None).writeToFile_atomically_(dir + 'tmp' + str(i + 1).zfill(2) + '.gif', True)
for i in range(9):
os.system('mv %stmp0%d.gif %stmp%d.gif' % (dir, i + 1, dir, i+1))
v = "montage "
for i in range(frames.intValue()):
v += '/tmp/frames/tmp%d.gif ' % (i + 1)
v += " -tile x1 -geometry +0+0 -alpha On -background \"rgba(0, 0, 0, 0.0)\" -quality 100 %s" % (fName.replace('gif', 'png'),)
os.system(v)
|
<commit_before><commit_msg>Convert script for .gif -> spritesheet.<commit_after>
|
from AppKit import NSApplication, NSImage, NSImageCurrentFrame, NSGIFFileType; import sys, os
dirname = sys.argv[1]
files = os.listdir(dirname)
for f in files:
if '.gif' not in f: continue
fName = os.path.join(dirname, f)
tName=os.path.basename(fName)
dir='/tmp/frames/'
os.system('rm -rf %s && mkdir -p %s' % (dir,dir))
app=NSApplication.sharedApplication()
img=NSImage.alloc().initWithContentsOfFile_(fName)
if img:
gifRep=img.representations()[0]
frames=gifRep.valueForProperty_('NSImageFrameCount')
if frames:
for i in range(frames.intValue()):
gifRep.setProperty_withValue_(NSImageCurrentFrame, i)
gifRep.representationUsingType_properties_(NSGIFFileType, None).writeToFile_atomically_(dir + 'tmp' + str(i + 1).zfill(2) + '.gif', True)
for i in range(9):
os.system('mv %stmp0%d.gif %stmp%d.gif' % (dir, i + 1, dir, i+1))
v = "montage "
for i in range(frames.intValue()):
v += '/tmp/frames/tmp%d.gif ' % (i + 1)
v += " -tile x1 -geometry +0+0 -alpha On -background \"rgba(0, 0, 0, 0.0)\" -quality 100 %s" % (fName.replace('gif', 'png'),)
os.system(v)
|
Convert script for .gif -> spritesheet.from AppKit import NSApplication, NSImage, NSImageCurrentFrame, NSGIFFileType; import sys, os
dirname = sys.argv[1]
files = os.listdir(dirname)
for f in files:
if '.gif' not in f: continue
fName = os.path.join(dirname, f)
tName=os.path.basename(fName)
dir='/tmp/frames/'
os.system('rm -rf %s && mkdir -p %s' % (dir,dir))
app=NSApplication.sharedApplication()
img=NSImage.alloc().initWithContentsOfFile_(fName)
if img:
gifRep=img.representations()[0]
frames=gifRep.valueForProperty_('NSImageFrameCount')
if frames:
for i in range(frames.intValue()):
gifRep.setProperty_withValue_(NSImageCurrentFrame, i)
gifRep.representationUsingType_properties_(NSGIFFileType, None).writeToFile_atomically_(dir + 'tmp' + str(i + 1).zfill(2) + '.gif', True)
for i in range(9):
os.system('mv %stmp0%d.gif %stmp%d.gif' % (dir, i + 1, dir, i+1))
v = "montage "
for i in range(frames.intValue()):
v += '/tmp/frames/tmp%d.gif ' % (i + 1)
v += " -tile x1 -geometry +0+0 -alpha On -background \"rgba(0, 0, 0, 0.0)\" -quality 100 %s" % (fName.replace('gif', 'png'),)
os.system(v)
|
<commit_before><commit_msg>Convert script for .gif -> spritesheet.<commit_after>from AppKit import NSApplication, NSImage, NSImageCurrentFrame, NSGIFFileType; import sys, os
dirname = sys.argv[1]
files = os.listdir(dirname)
for f in files:
if '.gif' not in f: continue
fName = os.path.join(dirname, f)
tName=os.path.basename(fName)
dir='/tmp/frames/'
os.system('rm -rf %s && mkdir -p %s' % (dir,dir))
app=NSApplication.sharedApplication()
img=NSImage.alloc().initWithContentsOfFile_(fName)
if img:
gifRep=img.representations()[0]
frames=gifRep.valueForProperty_('NSImageFrameCount')
if frames:
for i in range(frames.intValue()):
gifRep.setProperty_withValue_(NSImageCurrentFrame, i)
gifRep.representationUsingType_properties_(NSGIFFileType, None).writeToFile_atomically_(dir + 'tmp' + str(i + 1).zfill(2) + '.gif', True)
for i in range(9):
os.system('mv %stmp0%d.gif %stmp%d.gif' % (dir, i + 1, dir, i+1))
v = "montage "
for i in range(frames.intValue()):
v += '/tmp/frames/tmp%d.gif ' % (i + 1)
v += " -tile x1 -geometry +0+0 -alpha On -background \"rgba(0, 0, 0, 0.0)\" -quality 100 %s" % (fName.replace('gif', 'png'),)
os.system(v)
|
|
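Editor's note: the script above depends on AppKit, so it only runs on macOS, and it shells out to ImageMagick's `montage` for the tiling step. A cross-platform sketch of the same gif-to-spritesheet idea, assuming Pillow is installed; the file names are placeholders:

from PIL import Image, ImageSequence  # pip install Pillow

def gif_to_spritesheet(gif_path, out_path):
    im = Image.open(gif_path)
    frames = [f.convert('RGBA') for f in ImageSequence.Iterator(im)]
    w, h = frames[0].size
    # One horizontal strip with a transparent background,
    # like `montage -tile x1 -background "rgba(0, 0, 0, 0.0)"`.
    sheet = Image.new('RGBA', (w * len(frames), h), (0, 0, 0, 0))
    for i, frame in enumerate(frames):
        sheet.paste(frame, (i * w, 0))
    sheet.save(out_path)

gif_to_spritesheet('input.gif', 'input.png')  # hypothetical paths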
ed7cc112adb6d54f1c2b79b941ab5455a17d9442
|
FlaskApp/app_ls.py
|
FlaskApp/app_ls.py
|
import subprocess
import datetime
from flask import Flask, render_template, redirect, url_for, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/outlet')
def rf_outlet():
return render_template('rf-outlet.html')
@app.route('/hello/<name>')
def hello(name):
now = datetime.datetime.now()
timeString = now.strftime("%Y-%m-%d %H:%M")
templateData = {
'name': name,
'time': timeString
}
return render_template('hello.html', **templateData)
# route for handling the login page logic
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != 'admin' or request.form['password'] != 'admin':
error = 'Invalid Credentials. Please try again.'
else:
            return redirect(url_for('index'))  # url_for expects an endpoint name, not a path
return render_template('login.html', error=error)
@app.route('/postmethod', methods=['POST'])
def get_post():
outlet, status = request.form['outlet'], request.form['status']
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M")
print('Time: %s | Outlet: %s | Status: %s' % (time, outlet, status))
if status == 'on':
out = subprocess.check_output('ls')
print(out)
else:
out = subprocess.check_output('pwd')
print(out)
return outlet
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
Add subprocess trial for webserver app
|
Add subprocess trial for webserver app
|
Python
|
bsd-3-clause
|
kbsezginel/raspberry-pi,kbsezginel/raspberry-pi,kbsezginel/raspberry-pi,kbsezginel/raspberry-pi
|
Add subprocess trial for webserver app
|
import subprocess
import datetime
from flask import Flask, render_template, redirect, url_for, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/outlet')
def rf_outlet():
return render_template('rf-outlet.html')
@app.route('/hello/<name>')
def hello(name):
now = datetime.datetime.now()
timeString = now.strftime("%Y-%m-%d %H:%M")
templateData = {
'name': name,
'time': timeString
}
return render_template('hello.html', **templateData)
# route for handling the login page logic
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != 'admin' or request.form['password'] != 'admin':
error = 'Invalid Credentials. Please try again.'
else:
            return redirect(url_for('index'))  # url_for expects an endpoint name, not a path
return render_template('login.html', error=error)
@app.route('/postmethod', methods=['POST'])
def get_post():
outlet, status = request.form['outlet'], request.form['status']
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M")
print('Time: %s | Outlet: %s | Status: %s' % (time, outlet, status))
if status == 'on':
out = subprocess.check_output('ls')
print(out)
else:
out = subprocess.check_output('pwd')
print(out)
return outlet
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
<commit_before><commit_msg>Add subprocess trial for webserver app<commit_after>
|
import subprocess
import datetime
from flask import Flask, render_template, redirect, url_for, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/outlet')
def rf_outlet():
return render_template('rf-outlet.html')
@app.route('/hello/<name>')
def hello(name):
now = datetime.datetime.now()
timeString = now.strftime("%Y-%m-%d %H:%M")
templateData = {
'name': name,
'time': timeString
}
return render_template('hello.html', **templateData)
# route for handling the login page logic
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != 'admin' or request.form['password'] != 'admin':
error = 'Invalid Credentials. Please try again.'
else:
            return redirect(url_for('index'))  # url_for expects an endpoint name, not a path
return render_template('login.html', error=error)
@app.route('/postmethod', methods=['POST'])
def get_post():
outlet, status = request.form['outlet'], request.form['status']
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M")
print('Time: %s | Outlet: %s | Status: %s' % (time, outlet, status))
if status == 'on':
out = subprocess.check_output('ls')
print(out)
else:
out = subprocess.check_output('pwd')
print(out)
return outlet
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
Add subprocess trial for webserver appimport subprocess
import datetime
from flask import Flask, render_template, redirect, url_for, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/outlet')
def rf_outlet():
return render_template('rf-outlet.html')
@app.route('/hello/<name>')
def hello(name):
now = datetime.datetime.now()
timeString = now.strftime("%Y-%m-%d %H:%M")
templateData = {
'name': name,
'time': timeString
}
return render_template('hello.html', **templateData)
# route for handling the login page logic
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != 'admin' or request.form['password'] != 'admin':
error = 'Invalid Credentials. Please try again.'
else:
            return redirect(url_for('index'))  # url_for expects an endpoint name, not a path
return render_template('login.html', error=error)
@app.route('/postmethod', methods=['POST'])
def get_post():
outlet, status = request.form['outlet'], request.form['status']
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M")
print('Time: %s | Outlet: %s | Status: %s' % (time, outlet, status))
if status == 'on':
out = subprocess.check_output('ls')
print(out)
else:
out = subprocess.check_output('pwd')
print(out)
return outlet
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
<commit_before><commit_msg>Add subprocess trial for webserver app<commit_after>import subprocess
import datetime
from flask import Flask, render_template, redirect, url_for, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/outlet')
def rf_outlet():
return render_template('rf-outlet.html')
@app.route('/hello/<name>')
def hello(name):
now = datetime.datetime.now()
timeString = now.strftime("%Y-%m-%d %H:%M")
templateData = {
'name': name,
'time': timeString
}
return render_template('hello.html', **templateData)
# route for handling the login page logic
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != 'admin' or request.form['password'] != 'admin':
error = 'Invalid Credentials. Please try again.'
else:
            return redirect(url_for('index'))  # url_for expects an endpoint name, not a path
return render_template('login.html', error=error)
@app.route('/postmethod', methods=['POST'])
def get_post():
outlet, status = request.form['outlet'], request.form['status']
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M")
print('Time: %s | Outlet: %s | Status: %s' % (time, outlet, status))
if status == 'on':
out = subprocess.check_output('ls')
print(out)
else:
out = subprocess.check_output('pwd')
print(out)
return outlet
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
|
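Editor's note: a quick way to exercise the /postmethod route while the app above is running, assuming the requests package and Flask's default port 5000:

import requests  # pip install requests

resp = requests.post('http://localhost:5000/postmethod',
                     data={'outlet': '1', 'status': 'on'})
print(resp.text)  # the view echoes the outlet id back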
b6e8d93b661b4e65ecc6f5c150fcc1b1a4f26dc3
|
utils.py
|
utils.py
|
from numpy import array, dot
from math import sin, cos
def rotate(axis, vector, angle):
if axis == 'x':
m = array([
[1, 0, 0],
[0, cos(angle), -sin(angle)],
[0, sin(angle), cos(angle)]
])
elif axis == 'y':
m = array([
[cos(angle), 0, sin(angle)],
[0, 1, 0],
[-sin(angle), 0, cos(angle)]
])
elif axis == 'z':
m = array([
[cos(angle), -sin(angle), 0],
[sin(angle), cos(angle), 0],
[0, 0, 1]
])
else:
raise Exception("axis needs to be x, y or z")
return dot(m, vector)
|
Add silly un-mathy rotation function
|
Add silly un-mathy rotation function
|
Python
|
mit
|
kirberich/3dscanner
|
Add silly un-mathy rotation function
|
from numpy import array, dot
from math import sin, cos
def rotate(axis, vector, angle):
if axis == 'x':
m = array([
[1, 0, 0],
[0, cos(angle), -sin(angle)],
[0, sin(angle), cos(angle)]
])
elif axis == 'y':
m = array([
[cos(angle), 0, sin(angle)],
[0, 1, 0],
[-sin(angle), 0, cos(angle)]
])
elif axis == 'z':
m = array([
[cos(angle), -sin(angle), 0],
[sin(angle), cos(angle), 0],
[0, 0, 1]
])
else:
raise Exception("axis needs to be x, y or z")
return dot(m, vector)
|
<commit_before><commit_msg>Add silly un-mathy rotation function<commit_after>
|
from numpy import array, dot
from math import sin, cos
def rotate(axis, vector, angle):
if axis == 'x':
m = array([
[1, 0, 0],
[0, cos(angle), -sin(angle)],
[0, sin(angle), cos(angle)]
])
elif axis == 'y':
m = array([
[cos(angle), 0, sin(angle)],
[0, 1, 0],
[-sin(angle), 0, cos(angle)]
])
elif axis == 'z':
m = array([
[cos(angle), -sin(angle), 0],
[sin(angle), cos(angle), 0],
[0, 0, 1]
])
else:
raise Exception("axis needs to be x, y or z")
return dot(m, vector)
|
Add silly un-mathy rotation functionfrom numpy import array, dot
from math import sin, cos
def rotate(axis, vector, angle):
if axis == 'x':
m = array([
[1, 0, 0],
[0, cos(angle), -sin(angle)],
[0, sin(angle), cos(angle)]
])
elif axis == 'y':
m = array([
[cos(angle), 0, sin(angle)],
[0, 1, 0],
[-sin(angle), 0, cos(angle)]
])
elif axis == 'z':
m = array([
[cos(angle), -sin(angle), 0],
[sin(angle), cos(angle), 0],
[0, 0, 1]
])
else:
raise Exception("axis needs to be x, y or z")
return dot(m, vector)
|
<commit_before><commit_msg>Add silly un-mathy rotation function<commit_after>from numpy import array, dot
from math import sin, cos
def rotate(axis, vector, angle):
if axis == 'x':
m = array([
[1, 0, 0],
[0, cos(angle), -sin(angle)],
[0, sin(angle), cos(angle)]
])
elif axis == 'y':
m = array([
[cos(angle), 0, sin(angle)],
[0, 1, 0],
[-sin(angle), 0, cos(angle)]
])
elif axis == 'z':
m = array([
[cos(angle), -sin(angle), 0],
[sin(angle), cos(angle), 0],
[0, 0, 1]
])
else:
raise Exception("axis needs to be x, y or z")
return dot(m, vector)
|
|
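Editor's note: a sanity check for the rotate helper above, run in the same module; a quarter turn of the x unit vector about z should land on (0, 1, 0) up to float error:

from math import pi
from numpy import allclose, array

v = rotate('z', array([1.0, 0.0, 0.0]), pi / 2)
print(v)  # roughly [0. 1. 0.]
assert allclose(v, [0.0, 1.0, 0.0], atol=1e-9)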
a1f13dd46cdd68f72002ba13d4875af659694f94
|
models/base_olims_model.py
|
models/base_olims_model.py
|
import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
add_a_getter(cls, field)
pass
pass
pass
|
import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
# add_a_getter(cls, field)
pass
pass
pass
|
Comment out the 'add_a_getter' method call
|
Comment out the 'add_a_getter' method call
|
Python
|
agpl-3.0
|
sciCloud/OLiMS,yasir1brahim/OLiMS,sciCloud/OLiMS,sciCloud/OLiMS
|
import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
add_a_getter(cls, field)
pass
pass
pass
Comment out the 'add_a_getter' method call
|
import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
# add_a_getter(cls, field)
pass
pass
pass
|
<commit_before>import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
add_a_getter(cls, field)
pass
pass
pass
<commit_msg>Comment out the 'add_a_getter' method call<commit_after>
|
import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
# add_a_getter(cls, field)
pass
pass
pass
|
import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
add_a_getter(cls, field)
pass
pass
pass
Comment out the 'add_a_getter' method callimport copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
# add_a_getter(cls, field)
pass
pass
pass
|
<commit_before>import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
add_a_getter(cls, field)
pass
pass
pass
<commit_msg>Comment out the 'add_a_getter' method call<commit_after>import copy
def add_a_field(cls, field):
setattr(cls, field.string, field)
pass
def add_a_getter(cls, field):
fieldname = field.string
getter_template = copy.deepcopy(field.get_getterTemplate())
getter_template.__doc__ = "get%s - method to get value of field: %s" % (fieldname,fieldname)
getter_template.__name__ = "get%s" % fieldname
setattr(cls, getter_template.__name__, classmethod(getter_template))
pass
class BaseOLiMSModel(object):
'''
    This is the base class of all OLiMS models. In addition to models.Model, every OLiMS
    model will be derived from this class and get all its code from a Bika content class,
    i.e. a Bika content class is converted to an OLiMS model class by removing irrelevant
    code, adding derivation from this class, and calling the following at the end:
DerivedModel_Of_BaseOLiMSModel.initialze(bika_schema)
'''
@classmethod
def initialze(cls, schema):
        # take out each field from the passed schema and:
# 1. make a model variable for it, and assign this field to this variable
# 2. make getter methods for each model variable defined in step 1
for field in schema:
add_a_field(cls, field)
# add_a_getter(cls, field)
pass
pass
pass
|
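Editor's note: the (now commented-out) add_a_getter hinges on attaching classmethods at runtime with setattr. A self-contained sketch of that pattern with a toy Field type; every name below is illustrative, not an OLiMS API:

class Field(object):
    def __init__(self, string, value):
        self.string = string
        self.value = value

def attach_getter(cls, field):
    # Bind the field name via a default argument; a closure over the loop
    # variable would late-bind and every getter would read the last field.
    def getter(klass, _name=field.string):
        return getattr(klass, _name).value
    getter.__name__ = 'get%s' % field.string
    setattr(cls, getter.__name__, classmethod(getter))

class Sample(object):
    pass

for f in (Field('Title', 'hello'), Field('Count', 3)):
    setattr(Sample, f.string, f)  # what add_a_field does
    attach_getter(Sample, f)      # what add_a_getter did
print(Sample.getTitle())  # hello
print(Sample.getCount())  # 3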
ba9214f7fca609948130d2ae56bb19805b79d59c
|
tests/rules/test_git_branch_list.py
|
tests/rules/test_git_branch_list.py
|
from thefuck import shells
from thefuck.rules.git_branch_list import match, get_new_command
from tests.utils import Command
def test_match():
assert match(Command('git branch list'), None)
def test_not_match():
assert not match(Command(), None)
assert not match(Command('git commit'), None)
assert not match(Command('git branch'), None)
assert not match(Command('git stash list'), None)
def test_get_new_command():
assert (get_new_command(Command('git branch list'), None) ==
shells.and_('git branch --delete list', 'git branch'))
|
Add a test for git_branch_list rule
|
Add a test for git_branch_list rule
|
Python
|
mit
|
beni55/thefuck,mlk/thefuck,zhangzhishan/thefuck,AntonChankin/thefuck,suxinde2009/thefuck,nvbn/thefuck,NguyenHoaiNam/thefuck,levythu/thefuck,PLNech/thefuck,redreamality/thefuck,thinkerchan/thefuck,princeofdarkness76/thefuck,BertieJim/thefuck,ostree/thefuck,bigplus/thefuck,vanita5/thefuck,beni55/thefuck,sekaiamber/thefuck,barneyElDinosaurio/thefuck,roth1002/thefuck,Clpsplug/thefuck,subajat1/thefuck,subajat1/thefuck,AntonChankin/thefuck,nvbn/thefuck,vanita5/thefuck,thesoulkiller/thefuck,PLNech/thefuck,LawrenceHan/thefuck,scorphus/thefuck,qingying5810/thefuck,thinkerchan/thefuck,SimenB/thefuck,barneyElDinosaurio/thefuck,BertieJim/thefuck,artiya4u/thefuck,levythu/thefuck,bigplus/thefuck,Clpsplug/thefuck,bugaevc/thefuck,hxddh/thefuck,gaurav9991/thefuck,ytjiang/thefuck,scorphus/thefuck,roth1002/thefuck,Aeron/thefuck,princeofdarkness76/thefuck,LawrenceHan/thefuck,ostree/thefuck,gogobebe2/thefuck,SimenB/thefuck,MJerty/thefuck,thesoulkiller/thefuck,petr-tichy/thefuck,mcarton/thefuck,MJerty/thefuck,manashmndl/thefuck,mlk/thefuck,lawrencebenson/thefuck,manashmndl/thefuck,mbbill/thefuck,lawrencebenson/thefuck,redreamality/thefuck,mcarton/thefuck,hxddh/thefuck
|
Add a test for git_branch_list rule
|
from thefuck import shells
from thefuck.rules.git_branch_list import match, get_new_command
from tests.utils import Command
def test_match():
assert match(Command('git branch list'), None)
def test_not_match():
assert not match(Command(), None)
assert not match(Command('git commit'), None)
assert not match(Command('git branch'), None)
assert not match(Command('git stash list'), None)
def test_get_new_command():
assert (get_new_command(Command('git branch list'), None) ==
shells.and_('git branch --delete list', 'git branch'))
|
<commit_before><commit_msg>Add a test for git_branch_list rule<commit_after>
|
from thefuck import shells
from thefuck.rules.git_branch_list import match, get_new_command
from tests.utils import Command
def test_match():
assert match(Command('git branch list'), None)
def test_not_match():
assert not match(Command(), None)
assert not match(Command('git commit'), None)
assert not match(Command('git branch'), None)
assert not match(Command('git stash list'), None)
def test_get_new_command():
assert (get_new_command(Command('git branch list'), None) ==
shells.and_('git branch --delete list', 'git branch'))
|
Add a test for git_branch_list rulefrom thefuck import shells
from thefuck.rules.git_branch_list import match, get_new_command
from tests.utils import Command
def test_match():
assert match(Command('git branch list'), None)
def test_not_match():
assert not match(Command(), None)
assert not match(Command('git commit'), None)
assert not match(Command('git branch'), None)
assert not match(Command('git stash list'), None)
def test_get_new_command():
assert (get_new_command(Command('git branch list'), None) ==
shells.and_('git branch --delete list', 'git branch'))
|
<commit_before><commit_msg>Add a test for git_branch_list rule<commit_after>from thefuck import shells
from thefuck.rules.git_branch_list import match, get_new_command
from tests.utils import Command
def test_match():
assert match(Command('git branch list'), None)
def test_not_match():
assert not match(Command(), None)
assert not match(Command('git commit'), None)
assert not match(Command('git branch'), None)
assert not match(Command('git stash list'), None)
def test_get_new_command():
assert (get_new_command(Command('git branch list'), None) ==
shells.and_('git branch --delete list', 'git branch'))
|
|
cecd767ef94f4bc890b8b19d2404528a7c4170bb
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='LpSchedule',
version='0.1',
      description='An API for Lviv Polytechnik schedule',
author='Stepanov Valentyn',
author_email='mr.valentyn.stepanov@gmail.com',
url='http://example.com',
install_requires=['Flask>=0.10.1',
'Flask-Script>=2.0.5','grab>=0.6.30','gunicorn>=19.5.0',
'itsdangerous>=0.24','Jinja2>=2.8','lxml>=3.6.0',
'MarkupSafe>=0.23', 'pycurl>=7.43.0','pytils>=0.3',
'selection>=0.0.11','six>=1.10.0','user-agent>=0.1.4','weblib>=0.1.20',
        'Werkzeug>=0.11.9']
)
|
Add pythonic way to install
|
Add pythonic way to install
|
Python
|
mit
|
stenvix/lpschedule,stepanov-valentin/lpschedule,stepanov-valentin/lpschedule,stenvix/lpschedule,stepanov-valentin/lpschedule,stepanov-valentin/lpschedule,stenvix/lpschedule,stenvix/lpschedule
|
Add pythonic way to install
|
from setuptools import setup
setup(name='LpSchedule',
version='0.1',
      description='An API for Lviv Polytechnik schedule',
author='Stepanov Valentyn',
author_email='mr.valentyn.stepanov@gmail.com',
url='http://example.com',
install_requires=['Flask>=0.10.1',
'Flask-Script>=2.0.5','grab>=0.6.30','gunicorn>=19.5.0',
'itsdangerous>=0.24','Jinja2>=2.8','lxml>=3.6.0',
'MarkupSafe>=0.23', 'pycurl>=7.43.0','pytils>=0.3',
'selection>=0.0.11','six>=1.10.0','user-agent>=0.1.4','weblib>=0.1.20',
        'Werkzeug>=0.11.9']
)
|
<commit_before><commit_msg>Add pythonic way to install<commit_after>
|
from setuptools import setup
setup(name='LpSchedule',
version='0.1',
      description='An API for Lviv Polytechnik schedule',
author='Stepanov Valentyn',
author_email='mr.valentyn.stepanov@gmail.com',
url='http://example.com',
install_requires=['Flask>=0.10.1',
'Flask-Script>=2.0.5','grab>=0.6.30','gunicorn>=19.5.0',
'itsdangerous>=0.24','Jinja2>=2.8','lxml>=3.6.0',
'MarkupSafe>=0.23', 'pycurl>=7.43.0','pytils>=0.3',
'selection>=0.0.11','six>=1.10.0','user-agent>=0.1.4','weblib>=0.1.20',
        'Werkzeug>=0.11.9']
)
|
Add pythonic way to installfrom setuptools import setup
setup(name='LpSchedule',
version='0.1',
      description='An API for Lviv Polytechnik schedule',
author='Stepanov Valentyn',
author_email='mr.valentyn.stepanov@gmail.com',
url='http://example.com',
install_requires=['Flask>=0.10.1',
'Flask-Script>=2.0.5','grab>=0.6.30','gunicorn>=19.5.0',
'itsdangerous>=0.24','Jinja2>=2.8','lxml>=3.6.0',
'MarkupSafe>=0.23', 'pycurl>=7.43.0','pytils>=0.3',
'selection>=0.0.11','six>=1.10.0','user-agent>=0.1.4','weblib>=0.1.20',
        'Werkzeug>=0.11.9']
)
|
<commit_before><commit_msg>Add pythonic way to install<commit_after>from setuptools import setup
setup(name='LpSchedule',
version='0.1',
      description='An API for Lviv Polytechnik schedule',
author='Stepanov Valentyn',
author_email='mr.valentyn.stepanov@gmail.com',
url='http://example.com',
install_requires=['Flask>=0.10.1',
'Flask-Script>=2.0.5','grab>=0.6.30','gunicorn>=19.5.0',
'itsdangerous>=0.24','Jinja2>=2.8','lxml>=3.6.0',
'MarkupSafe>=0.23', 'pycurl>=7.43.0','pytils>=0.3',
'selection>=0.0.11','six>=1.10.0','user-agent>=0.1.4','weblib>=0.1.20',
        'Werkzeug>=0.11.9']
)
|
|
95edd6f6d3076e78b995b9b02e7d32938734cbf2
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.8',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.9',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
Prepare for next development cycle.
|
Prepare for next development cycle.
|
Python
|
mit
|
coldfix/udiskie,khardix/udiskie,coldfix/udiskie,pstray/udiskie,pstray/udiskie,mathstuf/udiskie
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.8',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
Prepare for next development cycle.
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.9',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
<commit_before>from distutils.core import setup
setup(
name='udiskie',
version='0.3.8',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
<commit_msg>Prepare for next development cycle.<commit_after>
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.9',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.8',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
Prepare for next development cycle.from distutils.core import setup
setup(
name='udiskie',
version='0.3.9',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
<commit_before>from distutils.core import setup
setup(
name='udiskie',
version='0.3.8',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
<commit_msg>Prepare for next development cycle.<commit_after>from distutils.core import setup
setup(
name='udiskie',
version='0.3.9',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
acf0b48055c339e67ead4b85c90a07ffdce60bf1
|
py/max-consecutive-ones.py
|
py/max-consecutive-ones.py
|
class Solution(object):
def findMaxConsecutiveOnes(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
n = 0
m = 0
for c in nums:
if c == 0:
n = 0
else:
n += 1
m = max(n, m)
return m
|
Add py solution for 485. Max Consecutive Ones
|
Add py solution for 485. Max Consecutive Ones
485. Max Consecutive Ones: https://leetcode.com/problems/max-consecutive-ones/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 485. Max Consecutive Ones
485. Max Consecutive Ones: https://leetcode.com/problems/max-consecutive-ones/
|
class Solution(object):
def findMaxConsecutiveOnes(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
n = 0
m = 0
for c in nums:
if c == 0:
n = 0
else:
n += 1
m = max(n, m)
return m
|
<commit_before><commit_msg>Add py solution for 485. Max Consecutive Ones
485. Max Consecutive Ones: https://leetcode.com/problems/max-consecutive-ones/<commit_after>
|
class Solution(object):
def findMaxConsecutiveOnes(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
n = 0
m = 0
for c in nums:
if c == 0:
n = 0
else:
n += 1
m = max(n, m)
return m
|
Add py solution for 485. Max Consecutive Ones
485. Max Consecutive Ones: https://leetcode.com/problems/max-consecutive-ones/class Solution(object):
def findMaxConsecutiveOnes(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
n = 0
m = 0
for c in nums:
if c == 0:
n = 0
else:
n += 1
m = max(n, m)
return m
|
<commit_before><commit_msg>Add py solution for 485. Max Consecutive Ones
485. Max Consecutive Ones: https://leetcode.com/problems/max-consecutive-ones/<commit_after>class Solution(object):
def findMaxConsecutiveOnes(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
n = 0
m = 0
for c in nums:
if c == 0:
n = 0
else:
n += 1
m = max(n, m)
return m
|
|
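Editor's note: the running-counter loop above is the O(n) textbook answer; the same result also falls out of itertools.groupby, which collapses consecutive runs. A sketch (Python 3, for the `default` keyword of max):

from itertools import groupby

def find_max_consecutive_ones(nums):
    # Keep only runs of 1s and take the longest; default=0 covers all-zero input.
    return max((sum(1 for _ in grp) for key, grp in groupby(nums) if key == 1),
               default=0)

print(find_max_consecutive_ones([1, 1, 0, 1, 1, 1]))  # 3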
d86d117963ac87bfb11e731ea98d1803b2fcd609
|
test/test_utils.py
|
test/test_utils.py
|
# Copyright 2021 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from pywayland.utils import AnonymousFile
def test_anonymous_file():
with AnonymousFile(10) as fd:
assert fd > 0
f = AnonymousFile(10)
f.close()
f.open()
with pytest.raises(IOError, match="File is already open"):
f.open()
f.close()
f.close()
|
Add test on anonymous file
|
Add test on anonymous file
|
Python
|
apache-2.0
|
flacjacket/pywayland
|
Add test on anonymous file
|
# Copyright 2021 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from pywayland.utils import AnonymousFile
def test_anonymous_file():
with AnonymousFile(10) as fd:
assert fd > 0
f = AnonymousFile(10)
f.close()
f.open()
with pytest.raises(IOError, match="File is already open"):
f.open()
f.close()
f.close()
|
<commit_before><commit_msg>Add test on anonymous file<commit_after>
|
# Copyright 2021 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from pywayland.utils import AnonymousFile
def test_anonymous_file():
with AnonymousFile(10) as fd:
assert fd > 0
f = AnonymousFile(10)
f.close()
f.open()
with pytest.raises(IOError, match="File is already open"):
f.open()
f.close()
f.close()
|
Add test on anonymous file# Copyright 2021 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from pywayland.utils import AnonymousFile
def test_anonymous_file():
with AnonymousFile(10) as fd:
assert fd > 0
f = AnonymousFile(10)
f.close()
f.open()
with pytest.raises(IOError, match="File is already open"):
f.open()
f.close()
f.close()
|
<commit_before><commit_msg>Add test on anonymous file<commit_after># Copyright 2021 Sean Vig
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from pywayland.utils import AnonymousFile
def test_anonymous_file():
with AnonymousFile(10) as fd:
assert fd > 0
f = AnonymousFile(10)
f.close()
f.open()
with pytest.raises(IOError, match="File is already open"):
f.open()
f.close()
f.close()
|
|
43cb60efabfdaab77fa2c8eee1a5d8730a321db1
|
samples/query_interfaces.py
|
samples/query_interfaces.py
|
#!/usr/bin/env python
import requests
from orionsdk import SwisClient
npm_server = 'localhost'
username = 'admin'
password = ''
verify = False
if not verify:
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def main():
swis = SwisClient(npm_server, username, password)
query = """
SELECT TOP 10
n.NodeID,
n.Caption AS NodeName,
i.InterfaceID,
i.Caption AS InterfaceName
FROM
Orion.Nodes n
JOIN
Orion.NPM.Interfaces i ON n.NodeID = i.NodeID
"""
results = swis.query(query)
for row in results['results']:
print("{NodeID} [{NodeName}] : {InterfaceID} [{InterfaceName}]".format(**row))
if __name__ == '__main__':
main()
|
Add example for pulling interfaces.
|
Add example for pulling interfaces.
|
Python
|
apache-2.0
|
solarwinds/orionsdk-python
|
Add example for pulling interfaces.
|
#!/usr/bin/env python
import requests
from orionsdk import SwisClient
npm_server = 'localhost'
username = 'admin'
password = ''
verify = False
if not verify:
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def main():
swis = SwisClient(npm_server, username, password)
query = """
SELECT TOP 10
n.NodeID,
n.Caption AS NodeName,
i.InterfaceID,
i.Caption AS InterfaceName
FROM
Orion.Nodes n
JOIN
Orion.NPM.Interfaces i ON n.NodeID = i.NodeID
"""
results = swis.query(query)
for row in results['results']:
print("{NodeID} [{NodeName}] : {InterfaceID} [{InterfaceName}]".format(**row))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add example for pulling interfaces.<commit_after>
|
#!/usr/bin/env python
import requests
from orionsdk import SwisClient
npm_server = 'localhost'
username = 'admin'
password = ''
verify = False
if not verify:
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def main():
swis = SwisClient(npm_server, username, password)
query = """
SELECT TOP 10
n.NodeID,
n.Caption AS NodeName,
i.InterfaceID,
i.Caption AS InterfaceName
FROM
Orion.Nodes n
JOIN
Orion.NPM.Interfaces i ON n.NodeID = i.NodeID
"""
results = swis.query(query)
for row in results['results']:
print("{NodeID} [{NodeName}] : {InterfaceID} [{InterfaceName}]".format(**row))
if __name__ == '__main__':
main()
|
Add example for pulling interfaces.#!/usr/bin/env python
import requests
from orionsdk import SwisClient
npm_server = 'localhost'
username = 'admin'
password = ''
verify = False
if not verify:
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def main():
swis = SwisClient(npm_server, username, password)
query = """
SELECT TOP 10
n.NodeID,
n.Caption AS NodeName,
i.InterfaceID,
i.Caption AS InterfaceName
FROM
Orion.Nodes n
JOIN
Orion.NPM.Interfaces i ON n.NodeID = i.NodeID
"""
results = swis.query(query)
for row in results['results']:
print("{NodeID} [{NodeName}] : {InterfaceID} [{InterfaceName}]".format(**row))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add example for pulling interfaces.<commit_after>#!/usr/bin/env python
import requests
from orionsdk import SwisClient
npm_server = 'localhost'
username = 'admin'
password = ''
verify = False
if not verify:
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def main():
swis = SwisClient(npm_server, username, password)
query = """
SELECT TOP 10
n.NodeID,
n.Caption AS NodeName,
i.InterfaceID,
i.Caption AS InterfaceName
FROM
Orion.Nodes n
JOIN
Orion.NPM.Interfaces i ON n.NodeID = i.NodeID
"""
results = swis.query(query)
for row in results['results']:
print("{NodeID} [{NodeName}] : {InterfaceID} [{InterfaceName}]".format(**row))
if __name__ == '__main__':
main()
|
|
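Editor's note: to my knowledge SwisClient.query also accepts named parameters, referenced as @name inside the SWQL text, which is safer than string interpolation for user-supplied filters. A sketch against the same swis object; the caption value is a placeholder:

query = """
    SELECT n.NodeID, i.InterfaceID, i.Caption AS InterfaceName
    FROM Orion.Nodes n
    JOIN Orion.NPM.Interfaces i ON n.NodeID = i.NodeID
    WHERE n.Caption = @node_caption
"""
results = swis.query(query, node_caption='core-sw1')  # hypothetical node name
for row in results['results']:
    print("{NodeID}: {InterfaceID} [{InterfaceName}]".format(**row))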
358cdd4b89221cbb02e7b04fc83cebb06570b03a
|
mezzanine/twitter/defaults.py
|
mezzanine/twitter/defaults.py
|
"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="#django",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
|
"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="django mezzanine",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
|
Update the default twitter query since it's been flooded by movie tweets.
|
Update the default twitter query since it's been flooded by movie tweets.
|
Python
|
bsd-2-clause
|
readevalprint/mezzanine,theclanks/mezzanine,dovydas/mezzanine,scarcry/snm-mezzanine,industrydive/mezzanine,ryneeverett/mezzanine,mush42/mezzanine,AlexHill/mezzanine,industrydive/mezzanine,spookylukey/mezzanine,eino-makitalo/mezzanine,Cajoline/mezzanine,Cajoline/mezzanine,webounty/mezzanine,gradel/mezzanine,dekomote/mezzanine-modeltranslation-backport,spookylukey/mezzanine,sjuxax/mezzanine,douglaskastle/mezzanine,vladir/mezzanine,Cicero-Zhao/mezzanine,geodesign/mezzanine,fusionbox/mezzanine,stbarnabas/mezzanine,orlenko/sfpirg,frankier/mezzanine,douglaskastle/mezzanine,damnfine/mezzanine,PegasusWang/mezzanine,stephenmcd/mezzanine,eino-makitalo/mezzanine,spookylukey/mezzanine,wbtuomela/mezzanine,agepoly/mezzanine,molokov/mezzanine,sjdines/mezzanine,christianwgd/mezzanine,dsanders11/mezzanine,readevalprint/mezzanine,batpad/mezzanine,frankchin/mezzanine,AlexHill/mezzanine,frankier/mezzanine,damnfine/mezzanine,Cicero-Zhao/mezzanine,wbtuomela/mezzanine,industrydive/mezzanine,SoLoHiC/mezzanine,frankchin/mezzanine,jjz/mezzanine,Skytorn86/mezzanine,mush42/mezzanine,ZeroXn/mezzanine,biomassives/mezzanine,gradel/mezzanine,emile2016/mezzanine,scarcry/snm-mezzanine,geodesign/mezzanine,viaregio/mezzanine,fusionbox/mezzanine,christianwgd/mezzanine,webounty/mezzanine,ZeroXn/mezzanine,Kniyl/mezzanine,cccs-web/mezzanine,molokov/mezzanine,tuxinhang1989/mezzanine,saintbird/mezzanine,ryneeverett/mezzanine,frankier/mezzanine,adrian-the-git/mezzanine,dsanders11/mezzanine,wyzex/mezzanine,sjdines/mezzanine,Skytorn86/mezzanine,Kniyl/mezzanine,orlenko/sfpirg,theclanks/mezzanine,orlenko/plei,dustinrb/mezzanine,agepoly/mezzanine,eino-makitalo/mezzanine,dovydas/mezzanine,joshcartme/mezzanine,sjdines/mezzanine,promil23/mezzanine,Kniyl/mezzanine,jjz/mezzanine,nikolas/mezzanine,sjuxax/mezzanine,joshcartme/mezzanine,molokov/mezzanine,dustinrb/mezzanine,vladir/mezzanine,tuxinhang1989/mezzanine,nikolas/mezzanine,wrwrwr/mezzanine,wyzex/mezzanine,saintbird/mezzanine,orlenko/plei,scarcry/snm-mezzanine,dovydas/mezzanine,nikolas/mezzanine,stephenmcd/mezzanine,ZeroXn/mezzanine,joshcartme/mezzanine,tuxinhang1989/mezzanine,dekomote/mezzanine-modeltranslation-backport,theclanks/mezzanine,ryneeverett/mezzanine,PegasusWang/mezzanine,damnfine/mezzanine,vladir/mezzanine,biomassives/mezzanine,adrian-the-git/mezzanine,orlenko/sfpirg,promil23/mezzanine,jjz/mezzanine,dustinrb/mezzanine,douglaskastle/mezzanine,SoLoHiC/mezzanine,stbarnabas/mezzanine,agepoly/mezzanine,dsanders11/mezzanine,batpad/mezzanine,wyzex/mezzanine,frankchin/mezzanine,adrian-the-git/mezzanine,PegasusWang/mezzanine,orlenko/plei,jerivas/mezzanine,wrwrwr/mezzanine,christianwgd/mezzanine,cccs-web/mezzanine,saintbird/mezzanine,jerivas/mezzanine,mush42/mezzanine,viaregio/mezzanine,webounty/mezzanine,dekomote/mezzanine-modeltranslation-backport,stephenmcd/mezzanine,jerivas/mezzanine,wbtuomela/mezzanine,biomassives/mezzanine,sjuxax/mezzanine,viaregio/mezzanine,promil23/mezzanine,gradel/mezzanine,SoLoHiC/mezzanine,emile2016/mezzanine,geodesign/mezzanine,Cajoline/mezzanine,Skytorn86/mezzanine,readevalprint/mezzanine,emile2016/mezzanine
|
"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="#django",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
Update the default twitter query since it's been flooded by movie tweets.
|
"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="django mezzanine",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
|
<commit_before>"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="#django",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
<commit_msg>Update the default twitter query since it's been flooded by movie tweets.<commit_after>
|
"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="django mezzanine",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
|
"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="#django",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
Update the default twitter query since it's been flooded by movie tweets."""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="django mezzanine",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
|
<commit_before>"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="#django",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
<commit_msg>Update the default twitter query since it's been flooded by movie tweets.<commit_after>"""
Default settings for the ``mezzanine.twitter`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_SEARCH
register_setting(
name="TWITTER_DEFAULT_QUERY_TYPE",
label=_("Default Twitter Query Type"),
description=_("Type of query that will be used to retrieve tweets for "
"the default Twitter feed."),
editable=True,
default=QUERY_TYPE_SEARCH,
choices=QUERY_TYPE_CHOICES,
)
register_setting(
name="TWITTER_DEFAULT_QUERY",
label=_("Default Twitter Query"),
description=_("Twitter query to use for the default query type."),
editable=True,
default="django mezzanine",
)
register_setting(
name="TWITTER_DEFAULT_NUM_TWEETS",
label=_("Default Number of Tweets"),
description=_("Number of tweets to display in the default Twitter feed."),
editable=True,
default=3,
)
|
d79a7f4e8be3cc9f0eb27da562bd0f1143005368
|
examples/power_on_swarm.py
|
examples/power_on_swarm.py
|
#!/usr/bin/python
import time
import sys
from psphere.client import Client
from psphere.managedobjects import VirtualMachine
from psphere.errors import ObjectNotFoundError
scatter_secs = 8
nodes = sys.argv[1:]
client = Client()
print("Powering on %s VMs" % len(nodes))
print("Estimated run time with %s seconds sleep between each power on: %s" %
(scatter_secs, scatter_secs*len(nodes)))
for node in nodes:
try:
vm = VirtualMachine.get(client, name=node, properties=["name", "runtime"])
except ObjectNotFoundError:
print("WARNING: Could not find VM with name %s" % node)
        continue
print("Powering on %s" % vm.name)
if vm.runtime.powerState == "poweredOn":
print("%s is already powered on." % vm.name)
continue
task = vm.PowerOnVM_Task()
time.sleep(scatter_secs)
|
Add a new example of powering on many VMs at once
|
Add a new example of powering on many VMs at once
|
Python
|
apache-2.0
|
graphite-server/psphere,jkinred/psphere
|
Add a new example of powering on many VMs at once
|
#!/usr/bin/python
import time
import sys
from psphere.client import Client
from psphere.managedobjects import VirtualMachine
from psphere.errors import ObjectNotFoundError
scatter_secs = 8
nodes = sys.argv[1:]
client = Client()
print("Powering on %s VMs" % len(nodes))
print("Estimated run time with %s seconds sleep between each power on: %s" %
(scatter_secs, scatter_secs*len(nodes)))
for node in nodes:
try:
vm = VirtualMachine.get(client, name=node, properties=["name", "runtime"])
except ObjectNotFoundError:
print("WARNING: Could not find VM with name %s" % node)
        continue
print("Powering on %s" % vm.name)
if vm.runtime.powerState == "poweredOn":
print("%s is already powered on." % vm.name)
continue
task = vm.PowerOnVM_Task()
time.sleep(scatter_secs)
|
<commit_before><commit_msg>Add a new example of powering on many VMs at once<commit_after>
|
#!/usr/bin/python
import time
import sys
from psphere.client import Client
from psphere.managedobjects import VirtualMachine
from psphere.errors import ObjectNotFoundError
scatter_secs = 8
nodes = sys.argv[1:]
client = Client()
print("Powering on %s VMs" % len(nodes))
print("Estimated run time with %s seconds sleep between each power on: %s" %
(scatter_secs, scatter_secs*len(nodes)))
for node in nodes:
try:
vm = VirtualMachine.get(client, name=node, properties=["name", "runtime"])
except ObjectNotFoundError:
print("WARNING: Could not find VM with name %s" % node)
        continue
print("Powering on %s" % vm.name)
if vm.runtime.powerState == "poweredOn":
print("%s is already powered on." % vm.name)
continue
task = vm.PowerOnVM_Task()
time.sleep(scatter_secs)
|
Add a new example of powering on many VMs at once#!/usr/bin/python
import time
import sys
from psphere.client import Client
from psphere.managedobjects import VirtualMachine
from psphere.errors import ObjectNotFoundError
scatter_secs = 8
nodes = sys.argv[1:]
client = Client()
print("Powering on %s VMs" % len(nodes))
print("Estimated run time with %s seconds sleep between each power on: %s" %
(scatter_secs, scatter_secs*len(nodes)))
for node in nodes:
try:
vm = VirtualMachine.get(client, name=node, properties=["name", "runtime"])
except ObjectNotFoundError:
print("WARNING: Could not find VM with name %s" % node)
        continue
print("Powering on %s" % vm.name)
if vm.runtime.powerState == "poweredOn":
print("%s is already powered on." % vm.name)
continue
task = vm.PowerOnVM_Task()
time.sleep(scatter_secs)
|
<commit_before><commit_msg>Add a new example of powering on many VMs at once<commit_after>#!/usr/bin/python
import time
import sys
from psphere.client import Client
from psphere.managedobjects import VirtualMachine
from psphere.errors import ObjectNotFoundError
scatter_secs = 8
nodes = sys.argv[1:]
client = Client()
print("Powering on %s VMs" % len(nodes))
print("Estimated run time with %s seconds sleep between each power on: %s" %
(scatter_secs, scatter_secs*len(nodes)))
for node in nodes:
try:
vm = VirtualMachine.get(client, name=node, properties=["name", "runtime"])
except ObjectNotFoundError:
print("WARNING: Could not find VM with name %s" % node)
        continue
print("Powering on %s" % vm.name)
if vm.runtime.powerState == "poweredOn":
print("%s is already powered on." % vm.name)
continue
task = vm.PowerOnVM_Task()
time.sleep(scatter_secs)
|
|
eb77b1335afb9d9e29d39ca9ff4e3f0fa3d89a8d
|
Lib/test/test_pwd.py
|
Lib/test/test_pwd.py
|
import pwd
import string
verbose = 0
if __name__ == '__main__':
verbose = 1
entries = pwd.getpwall()
for e in entries:
name = e[0]
uid = e[2]
if verbose:
print name, uid
dbuid = pwd.getpwuid(uid)
if dbuid[0] <> name:
print 'Mismatch in pwd.getpwuid()'
dbname = pwd.getpwnam(name)
if dbname[2] <> uid:
print 'Mismatch in pwd.getpwnam()'
break
# try to get some errors
bynames = {}
byuids = {}
for n, p, u, g, gecos, d, s in entries:
bynames[n] = u
byuids[u] = n
allnames = bynames.keys()
namei = 0
fakename = allnames[namei]
while bynames.has_key(fakename):
chars = map(None, fakename)
for i in range(len(chars)):
if chars[i] == 'z':
chars[i] = 'A'
break
elif chars[i] == 'Z':
continue
else:
chars[i] = chr(ord(chars[i]) + 1)
break
else:
namei = namei + 1
try:
fakename = allnames[namei]
except IndexError:
# should never happen... if so, just forget it
break
fakename = string.join(map(None, chars), '')
try:
pwd.getpwnam(fakename)
except KeyError:
pass
else:
print 'fakename', fakename, 'did not except pwd.getpwnam()'
uids = byuids.keys()
uids.sort()
uids.reverse()
fakeuid = uids[0] + 1
try:
pwd.getpwuid(fakeuid)
except KeyError:
pass
else:
print 'fakeuid', fakeuid, 'did not except pwd.getpwuid()'
|
Test of the pwd module
|
Test of the pwd module
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Test of the pwd module
|
import pwd
import string
verbose = 0
if __name__ == '__main__':
verbose = 1
entries = pwd.getpwall()
for e in entries:
name = e[0]
uid = e[2]
if verbose:
print name, uid
dbuid = pwd.getpwuid(uid)
if dbuid[0] <> name:
print 'Mismatch in pwd.getpwuid()'
dbname = pwd.getpwnam(name)
if dbname[2] <> uid:
print 'Mismatch in pwd.getpwnam()'
break
# try to get some errors
bynames = {}
byuids = {}
for n, p, u, g, gecos, d, s in entries:
bynames[n] = u
byuids[u] = n
allnames = bynames.keys()
namei = 0
fakename = allnames[namei]
while bynames.has_key(fakename):
chars = map(None, fakename)
for i in range(len(chars)):
if chars[i] == 'z':
chars[i] = 'A'
break
elif chars[i] == 'Z':
continue
else:
chars[i] = chr(ord(chars[i]) + 1)
break
else:
namei = namei + 1
try:
fakename = allnames[namei]
except IndexError:
# should never happen... if so, just forget it
break
fakename = string.join(map(None, chars), '')
try:
pwd.getpwnam(fakename)
except KeyError:
pass
else:
print 'fakename', fakename, 'did not except pwd.getpwnam()'
uids = byuids.keys()
uids.sort()
uids.reverse()
fakeuid = uids[0] + 1
try:
pwd.getpwuid(fakeuid)
except KeyError:
pass
else:
print 'fakeuid', fakeuid, 'did not except pwd.getpwuid()'
|
<commit_before><commit_msg>Test of the pwd module<commit_after>
|
import pwd
import string
verbose = 0
if __name__ == '__main__':
verbose = 1
entries = pwd.getpwall()
for e in entries:
name = e[0]
uid = e[2]
if verbose:
print name, uid
dbuid = pwd.getpwuid(uid)
if dbuid[0] <> name:
print 'Mismatch in pwd.getpwuid()'
dbname = pwd.getpwnam(name)
if dbname[2] <> uid:
print 'Mismatch in pwd.getpwnam()'
break
# try to get some errors
bynames = {}
byuids = {}
for n, p, u, g, gecos, d, s in entries:
bynames[n] = u
byuids[u] = n
allnames = bynames.keys()
namei = 0
fakename = allnames[namei]
while bynames.has_key(fakename):
chars = map(None, fakename)
for i in range(len(chars)):
if chars[i] == 'z':
chars[i] = 'A'
break
elif chars[i] == 'Z':
continue
else:
chars[i] = chr(ord(chars[i]) + 1)
break
else:
namei = namei + 1
try:
fakename = allnames[namei]
except IndexError:
# should never happen... if so, just forget it
break
fakename = string.join(map(None, chars), '')
try:
pwd.getpwnam(fakename)
except KeyError:
pass
else:
print 'fakename', fakename, 'did not except pwd.getpwnam()'
uids = byuids.keys()
uids.sort()
uids.reverse()
fakeuid = uids[0] + 1
try:
pwd.getpwuid(fakeuid)
except KeyError:
pass
else:
print 'fakeuid', fakeuid, 'did not except pwd.getpwuid()'
|
Test of the pwd moduleimport pwd
import string
verbose = 0
if __name__ == '__main__':
verbose = 1
entries = pwd.getpwall()
for e in entries:
name = e[0]
uid = e[2]
if verbose:
print name, uid
dbuid = pwd.getpwuid(uid)
if dbuid[0] <> name:
print 'Mismatch in pwd.getpwuid()'
dbname = pwd.getpwnam(name)
if dbname[2] <> uid:
print 'Mismatch in pwd.getpwnam()'
break
# try to get some errors
bynames = {}
byuids = {}
for n, p, u, g, gecos, d, s in entries:
bynames[n] = u
byuids[u] = n
allnames = bynames.keys()
namei = 0
fakename = allnames[namei]
while bynames.has_key(fakename):
chars = map(None, fakename)
for i in range(len(chars)):
if chars[i] == 'z':
chars[i] = 'A'
break
elif chars[i] == 'Z':
continue
else:
chars[i] = chr(ord(chars[i]) + 1)
break
else:
namei = namei + 1
try:
fakename = allnames[namei]
except IndexError:
# should never happen... if so, just forget it
break
fakename = string.join(map(None, chars), '')
try:
pwd.getpwnam(fakename)
except KeyError:
pass
else:
print 'fakename', fakename, 'did not except pwd.getpwnam()'
uids = byuids.keys()
uids.sort()
uids.reverse()
fakeuid = uids[0] + 1
try:
pwd.getpwuid(fakeuid)
except KeyError:
pass
else:
print 'fakeuid', fakeuid, 'did not except pwd.getpwuid()'
|
<commit_before><commit_msg>Test of the pwd module<commit_after>import pwd
import string
verbose = 0
if __name__ == '__main__':
verbose = 1
entries = pwd.getpwall()
for e in entries:
name = e[0]
uid = e[2]
if verbose:
print name, uid
dbuid = pwd.getpwuid(uid)
if dbuid[0] <> name:
print 'Mismatch in pwd.getpwuid()'
dbname = pwd.getpwnam(name)
if dbname[2] <> uid:
print 'Mismatch in pwd.getpwnam()'
break
# try to get some errors
bynames = {}
byuids = {}
for n, p, u, g, gecos, d, s in entries:
bynames[n] = u
byuids[u] = n
allnames = bynames.keys()
namei = 0
fakename = allnames[namei]
while bynames.has_key(fakename):
chars = map(None, fakename)
for i in range(len(chars)):
if chars[i] == 'z':
chars[i] = 'A'
break
elif chars[i] == 'Z':
continue
else:
chars[i] = chr(ord(chars[i]) + 1)
break
else:
namei = namei + 1
try:
fakename = allnames[namei]
except IndexError:
# should never happen... if so, just forget it
break
fakename = string.join(map(None, chars), '')
try:
pwd.getpwnam(fakename)
except KeyError:
pass
else:
print 'fakename', fakename, 'did not except pwd.getpwnam()'
uids = byuids.keys()
uids.sort()
uids.reverse()
fakeuid = uids[0] + 1
try:
pwd.getpwuid(fakeuid)
except KeyError:
pass
else:
print 'fakeuid', fakeuid, 'did not except pwd.getpwuid()'
|
|
ea5bd5fe05c6d2a9580f5ca6c2238971c28e36bc
|
apps/plea/migrations/0026_caseoffencefilter.py
|
apps/plea/migrations/0026_caseoffencefilter.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('plea', '0025_auto_20151210_1526'),
]
operations = [
migrations.CreateModel(
name='CaseOffenceFilter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('filter_match', models.CharField(max_length=20)),
('description', models.CharField(max_length=500, null=True, blank=True)),
],
),
]
|
Add migration for CaseOffenceFilter model
|
Add migration for CaseOffenceFilter model
|
Python
|
mit
|
ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas
|
Add migration for CaseOffenceFilter model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('plea', '0025_auto_20151210_1526'),
]
operations = [
migrations.CreateModel(
name='CaseOffenceFilter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('filter_match', models.CharField(max_length=20)),
('description', models.CharField(max_length=500, null=True, blank=True)),
],
),
]
|
<commit_before><commit_msg>Add migration for CaseOffenceFilter model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('plea', '0025_auto_20151210_1526'),
]
operations = [
migrations.CreateModel(
name='CaseOffenceFilter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('filter_match', models.CharField(max_length=20)),
('description', models.CharField(max_length=500, null=True, blank=True)),
],
),
]
|
Add migration for CaseOffenceFilter model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('plea', '0025_auto_20151210_1526'),
]
operations = [
migrations.CreateModel(
name='CaseOffenceFilter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('filter_match', models.CharField(max_length=20)),
('description', models.CharField(max_length=500, null=True, blank=True)),
],
),
]
|
<commit_before><commit_msg>Add migration for CaseOffenceFilter model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('plea', '0025_auto_20151210_1526'),
]
operations = [
migrations.CreateModel(
name='CaseOffenceFilter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('filter_match', models.CharField(max_length=20)),
('description', models.CharField(max_length=500, null=True, blank=True)),
],
),
]
|