Column schema (string length ranges and class counts as reported for this split):

| column | dtype | lengths / classes |
|---|---|---|
| commit | string | 40–40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |

The prompt/response pairs and the text columns are derived from the other fields: prompt carries the commit message, response carries new_contents, and the tagged and text variants wrap or concatenate the same fields using <commit_before>, <commit_msg>, and <commit_after> markers.
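
A minimal sketch of pulling rows like the ones below with the `datasets` library; the dataset identifier here is a placeholder, since this dump does not name the dataset:

```python
from datasets import load_dataset

# "org/commits-python" is a hypothetical identifier; substitute the real one.
ds = load_dataset("org/commits-python", split="train")

row = ds[0]
print(row["commit"], row["new_file"])  # commit hash and path of the changed file
print(row["subject"])                  # one-line commit subject
print(row["new_contents"][:120])       # first characters of the committed file
```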
---

commit: 7a0bb642a4dccd3b6ada7cace0583aed54caedec
old_file: tests/test_settings.py
new_file: tests/test_settings.py
old_contents: (empty; new file)
new_contents:

```python
import os
import unittest

from swf.settings import from_env

AWS_ENV_KEYS = (
    "AWS_ACCESS_KEY_ID",
    "AWS_SECRET_ACCESS_KEY",
    "AWS_DEFAULT_REGION",
)


class TestSettings(unittest.TestCase):
    def setUp(self):
        self.oldies = {}
        for key in AWS_ENV_KEYS:
            self.oldies[key] = os.environ.get(key)
            os.environ.pop(key, None)

    def tearDown(self):
        for key in AWS_ENV_KEYS:
            if self.oldies[key]:
                os.environ[key] = self.oldies[key]
            else:
                os.environ.pop(key, None)

    def test_get_aws_settings_with_access_key_id(self):
        """
        If AWS_ACCESS_KEY_ID is set, get all 3 params from env.
        """
        os.environ["AWS_ACCESS_KEY_ID"] = "foo"
        os.environ["AWS_SECRET_ACCESS_KEY"] = "bar"
        os.environ["AWS_DEFAULT_REGION"] = "eu-west-1"
        _settings = from_env()
        self.assertEqual(_settings, {
            "aws_access_key_id": "foo",
            "aws_secret_access_key": "bar",
            "region": "eu-west-1",
        })

    # TODO: change that, this is weird and confuses me as for AWS_DEFAULT_REGION handling
    def test_get_aws_settings_without_access_key_id(self):
        """
        If AWS_DEFAULT_REGION is not set, don't get anything from env.
        """
        os.environ["AWS_DEFAULT_REGION"] = "eu-west-1"
        _settings = from_env()
        self.assertEqual(_settings, {})
```

subject: Add some tests to demonstrate current behaviour
message: Add some tests to demonstrate current behaviour
lang: Python
license: mit
repos: botify-labs/python-simple-workflow,botify-labs/python-simple-workflow
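
The two tests pin down the current contract of `from_env`: all three settings are read when `AWS_ACCESS_KEY_ID` is present, and nothing is read otherwise. A sketch of an implementation consistent with that contract (hypothetical; the real `swf.settings` code is not part of this record):

```python
import os


def from_env():
    # Hypothetical reconstruction: key everything off AWS_ACCESS_KEY_ID,
    # as the two tests above imply.
    if "AWS_ACCESS_KEY_ID" not in os.environ:
        return {}
    return {
        "aws_access_key_id": os.environ["AWS_ACCESS_KEY_ID"],
        "aws_secret_access_key": os.environ["AWS_SECRET_ACCESS_KEY"],
        "region": os.environ["AWS_DEFAULT_REGION"],
    }
```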
---

commit: df17568e00d1b0adbd327d58918d71b3bf195ced
old_file: py/valid-triangle-number.py
new_file: py/valid-triangle-number.py
old_contents: (empty; new file)
new_contents:

```python
from collections import Counter


class Solution(object):
    def triangleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        nums = filter(None, nums)
        if not nums:
            return 0
        c = Counter(nums)
        N = max(nums)
        buckets = [0] * (N + 1)
        for k, cnt in c.iteritems():
            buckets[k] += cnt
        for i in xrange(1, N + 1):
            buckets[i] += buckets[i - 1]
        s = sorted(c)
        ans = 0
        for i, n1 in enumerate(s):
            for j in xrange(i):
                n2 = s[j]
                n1_n2 = n1 + n2
                ans += c[n1] * c[n2] * (buckets[min(n1_n2 - 1, N)] - buckets[n1])
                ans += c[n2] * (c[n1] - 1) * c[n1] / 2
            ans += c[n1] * (c[n1] - 1) * (c[n1] - 2) / 6
            ans += c[n1] * (c[n1] - 1) / 2 * (buckets[min(n1 * 2 - 1, N)] - buckets[n1])
        return ans
```

subject: Add py solution for 611. Valid Triangle Number
message: Add py solution for 611. Valid Triangle Number
611. Valid Triangle Number: https://leetcode.com/problems/valid-triangle-number/
lang: Python
license: apache-2.0
repos: ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
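
The counting logic splits the triples by how many of the three side lengths coincide. A quick sanity check against the problem's canonical example (run under Python 2, which the `xrange`/`iteritems` calls require):

```python
# [2, 2, 3, 4] admits three triangles: (2, 2, 3) once and (2, 3, 4) twice,
# one per copy of 2, so the expected count is 3.
print(Solution().triangleNumber([2, 2, 3, 4]))  # -> 3
```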
---

commit: 35ad1b2bc237ea0a5783750d5980040f9cbdec92
old_file: source/sql/alembic/versions/56c1c7a19078_add_visible_parameter.py
new_file: source/sql/alembic/versions/56c1c7a19078_add_visible_parameter.py
old_contents: (empty; new file)
new_contents:

```python
"""add visible parameter

Revision ID: 56c1c7a19078
Revises: 46168c7abc89
Create Date: 2018-11-13 15:33:38.146755

"""

# revision identifiers, used by Alembic.
revision = '56c1c7a19078'
down_revision = '46168c7abc89'

from alembic import op
import sqlalchemy as sa
import geoalchemy2 as ga
from sqlalchemy.dialects import postgresql


def upgrade():
    ### Add new flag for input/ouput visibility
    op.add_column('way', sa.Column('visible', sa.BOOLEAN(), nullable=False), schema='georef')


def downgrade():
    op.drop_column('way', 'visible', schema='georef')
```

subject: Add visible parameter (Bool) in georef.way Bdd
message: Add visible parameter (Bool) in georef.way Bdd
lang: Python
license: agpl-3.0
repos: xlqian/navitia,Tisseo/navitia,kinnou02/navitia,patochectp/navitia,Tisseo/navitia,pbougue/navitia,xlqian/navitia,CanalTP/navitia,xlqian/navitia,CanalTP/navitia,CanalTP/navitia,Tisseo/navitia,CanalTP/navitia,Tisseo/navitia,kinnou02/navitia,Tisseo/navitia,CanalTP/navitia,pbougue/navitia,patochectp/navitia,xlqian/navitia,kinnou02/navitia,patochectp/navitia,pbougue/navitia,patochectp/navitia,kinnou02/navitia,pbougue/navitia,xlqian/navitia
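
On the model side, this migration corresponds to a single new column on `georef.way`. A sketch of the matching SQLAlchemy declaration (the actual Way model is not part of this record, and the primary key below is a placeholder to keep the snippet valid):

```python
from sqlalchemy import Boolean, Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Way(Base):
    __tablename__ = 'way'
    __table_args__ = {'schema': 'georef'}
    id = Column(Integer, primary_key=True)     # placeholder key, not from the record
    visible = Column(Boolean, nullable=False)  # the column this migration adds
```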
---

commit: 5e2ebfe1d45a542f7f8b480e855890140cfbc1fb
old_file: samples/vmc/networks_nsxt/hello_world.py
new_file: samples/vmc/networks_nsxt/hello_world.py
old_contents: (empty; new file)
new_contents:

```python
#!/usr/bin/env python
"""
* *******************************************************
* Copyright (c) VMware, Inc. 2019. All Rights Reserved.
* SPDX-License-Identifier: MIT
* *******************************************************
*
* DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
* EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
* WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
* NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""

__author__ = 'VMware, Inc.'

import argparse
import pprint

from com.vmware.nsx_policy_client_for_vmc import (
    create_nsx_policy_client_for_vmc)


class AuthExample(object):
    """
    Demonstrates how to authenticate to VMC using the NSX-T SDK
    and perform a simple read operation.

    Sample Prerequisites:
        - An organization associated with the calling user.
        - A SDDC in the organization
    """

    def __init__(self):
        parser = argparse.ArgumentParser(
            formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument('-r', '--refresh-token',
                            required=True,
                            help='VMware Cloud API refresh token')
        parser.add_argument('-o', '--org-id',
                            required=True,
                            help='Organization identifier.')
        parser.add_argument('-s', '--sddc-id',
                            required=True,
                            help='Sddc Identifier.')
        args = parser.parse_args()
        self.org_id = args.org_id
        self.sddc_id = args.sddc_id
        self.vmc_client = create_nsx_policy_client_for_vmc(
            args.refresh_token, args.org_id, args.sddc_id)

    def get_domains(self):
        print('\n# Get Domains: List network domains:')
        domains = self.vmc_client.infra.Domains.list()
        pprint.pprint(domains)


def main():
    auth_example = AuthExample()
    auth_example.get_domains()


if __name__ == '__main__':
    main()
```

subject: Add networks_nsxt dir, simple example
message: Add networks_nsxt dir, simple example
Define a directory for NSX-T sample code. Add a basic
example that shows how to authenticate to VMC/NSX-T and
make a simple read call.
Signed-Off-By: Gordon Good <357c6cd042c65e38a52fc9cc40c47924d92a9c54@vmware.com>
lang: Python
license: mit
repos: tianhao64/vsphere-automation-sdk-python,tianhao64/vsphere-automation-sdk-python
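
Stripped of the argparse plumbing, the sample reduces to one client construction and one read. A minimal sketch with placeholder credentials:

```python
from com.vmware.nsx_policy_client_for_vmc import (
    create_nsx_policy_client_for_vmc)

# All three values are placeholders for illustration.
client = create_nsx_policy_client_for_vmc(
    'my-refresh-token', 'my-org-id', 'my-sddc-id')

# The same read that the sample's get_domains() performs.
print(client.infra.Domains.list())
```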
---

commit: dd1e7b06c4fbb31da91a14a57d98b4910eb4a351
old_file: candidates/tests/helpers.py
new_file: candidates/tests/helpers.py
old_contents: (empty; new file)
new_contents:

```python
from __future__ import print_function

import difflib
import pprint
import sys


def p(*args):
    """A helper for printing to stderr"""
    print(file=sys.stderr, *args)


def equal_arg(arg1, arg2):
    """Return True if the args are equal, False otherwise

    If the arguments aren't equal under ==, return True, otherwise
    return False and try to output to stderr a diff of the
    pretty-printed objects."""
    if arg1 == arg2:
        return True
    # This is more or less taken from assertDictEqual in
    # django/utils/unittest/case.py:
    args1_lines = pprint.pformat(arg1).splitlines()
    args2_lines = pprint.pformat(arg2).splitlines()
    diff = difflib.ndiff(args1_lines, args2_lines)
    p("Found the following differences: ====================================")
    for line in diff:
        p(line)
    p("=====================================================================")
    return False


def equal_call_args(args1, args2):
    """Return True if two sequences of arguments are equal

    Otherwise return False and output a diff of the first non-equal
    arguments to stderr."""
    if len(args1) != len(args2):
        message = "The argument lists were different lengths: {0} and {1}"
        p(message.format(len(args1), len(args2)))
    for i, arg1 in enumerate(args1):
        if not equal_arg(arg1, args2[i]):
            return False
    return True
```

subject: Add equal_call_args for helpful diffs of args
message: Add equal_call_args for helpful diffs of args
This is particularly useful because mock's assert_called_with produces
really unhelpful output if the the expectation doesn't match.
lang: Python
license: agpl-3.0
repos: neavouli/yournextrepresentative,neavouli/yournextrepresentative,mhl/yournextmp-popit,neavouli/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,mhl/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mhl/yournextmp-popit,mysociety/yournextrepresentative,openstate/yournextrepresentative,openstate/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,YoQuieroSaber/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative
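
In practice the helper is paired with a mock's recorded call: `call_args[0]` holds the positional arguments of the last call, which can be compared against the expected tuple. A hypothetical usage (the mocked call and its arguments are illustrative, not from the codebase):

```python
from mock import MagicMock  # unittest.mock under Python 3

m = MagicMock()
m({"a": 1}, [1, 2, 3])

# call_args[0] is the tuple of positional arguments from the last call.
expected = ({"a": 1}, [1, 2, 4])
if not equal_call_args(expected, m.call_args[0]):
    print("arguments differed; see the line-by-line diff on stderr")
```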
---

commit: 02f69a304a0e55a8f3164b7b1fbafbf0ba7f07b4
old_file: pyboard/GPIO_interrupt_updater.py
new_file: pyboard/GPIO_interrupt_updater.py
old_contents: (empty; new file)
new_contents:

```python
from pyb import UART
from pyb import ExtInt
from pyb import Pin

from micropyGPS import MicropyGPS

# Global Flag to Start GPS data Processing
new_data = False


# Callback Function
def pps_callback(line):
    print("Updated GPS Object...")
    global new_data  # Use Global to trigger update
    new_data = True


print('GPS Interrupt Tester')

# Instantiate the micropyGPS object
my_gps = MicropyGPS()

# Setup the connection to your GPS here
# This example uses UART 3 with RX on pin Y10
# Baudrate is 9600bps, with the standard 8 bits, 1 stop bit, no parity
# Also made the buffer size very large (1000 chars) to accommodate all the characters that stack up
# each second
uart = UART(3, 9600, read_buf_len=1000)

# Create an external interrupt on pin X8
pps_pin = pyb.Pin.board.X8
extint = pyb.ExtInt(pps_pin, pyb.ExtInt.IRQ_FALLING, pyb.Pin.PULL_UP, pps_callback)

# Main Infinite Loop
while 1:
    # Do Other Stuff Here.......

    # Update the GPS Object when flag is tripped
    if new_data:
        while uart.any():
            my_gps.update(chr(uart.readchar()))  # Note the conversion to to chr, UART outputs ints normally
        print('UTC Timestamp:', my_gps.timestamp)
        print('Date:', my_gps.date_string('long'))
        print('Latitude:', my_gps.latitude_string())
        print('Longitude:', my_gps.longitude_string())
        print('Horizontal Dilution of Precision:', my_gps.hdop)
        print()
        new_data = False  # Clear the flag
```

subject: Add external interrupt updater example
message: Add external interrupt updater example
lang: Python
license: mit
repos: inmcm/micropyGPS
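
The interrupt setup references `pyb.Pin` and `pyb.ExtInt` through the module prefix even though the names are imported directly at the top, so as written the script additionally assumes `import pyb`. The equivalent wiring using the imported names would be:

```python
# Same interrupt setup, written against the names imported at the top of the script.
pps_pin = Pin.board.X8
extint = ExtInt(pps_pin, ExtInt.IRQ_FALLING, Pin.PULL_UP, pps_callback)
```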
---

commit: aea73dc4d5484fa8999348d4291978289ca02fe0
old_file: ods/spiders/ecfin_spider.py
new_file: ods/spiders/ecfin_spider.py
old_contents: (empty; new file)
new_contents:

```python
import urlparse

from ods.items import OdsSheet, DatasetItem, DistributionItem
from ods.spiders import OdsSpider


def flat_text(sel):
    """Return the flat text contained in a selector."""
    return " ".join(s.strip() for s in sel.xpath("text()").extract())


class EcfinSurveysSpider(OdsSpider):
    name = "ecfin-surveys"
    start_urls = [ "http://ec.europa.eu/economy_finance/db_indicators/surveys/time_series/index_en.htm" ]

    def parse_datasets(self, selector, response):
        datasets = []
        for link in selector.css(".layout-content table.big_search") \
                            .xpath(".//a[re:test(@href, '\.zip$')]"):
            title = flat_text(link.xpath("ancestor::table[1]//th[1]"))
            target = flat_text(link.xpath("ancestor::tr[1]/td[1]"))
            col = len(link.xpath("ancestor::td[1]/preceding-sibling::td")) + 1
            head = flat_text(link.xpath("ancestor::table[1]//th[%d]" % col))
            dataset = DatasetItem()
            item = DistributionItem()
            dataset.add_distribution(item)
            dataset["documentation_title"] = "Business and Consumer Suveys"
            dataset["documentation_url"] = response.url
            dataset['title'] = " - ".join((title, head, target))
            dataset['uri'] = urlparse.urljoin(response.url, link.xpath("@href").extract()[0])
            item['description'] = dataset['title']
            item['access_url'] = dataset['uri']
            datasets.append(dataset)
        return datasets
```

subject: Add example spider for DG ECFIN
message: Add example spider for DG ECFIN
lang: Python
license: apache-2.0
repos: tenforce/ods-scraper,tenforce/ods-scraper
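
The spider can be driven without the scrapy CLI through CrawlerProcess; a sketch assuming the `ods` project package is importable:

```python
from scrapy.crawler import CrawlerProcess

from ods.spiders.ecfin_spider import EcfinSurveysSpider

process = CrawlerProcess()
process.crawl(EcfinSurveysSpider)
process.start()  # blocks until the crawl finishes
```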
---

commit: d655a1c3e27e637e0970b9fed71875eb63f36a12
old_file: tools/cr/cr/actions/gyp.py
new_file: tools/cr/cr/actions/gyp.py
old_contents: identical to new_contents below, except that the ENABLED key is misspelled GPP_DEF_target_arch (the typo this commit fixes)
new_contents:

```python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A module to add gyp support to cr."""

import cr
import os

GYP_DEFINE_PREFIX = 'GYP_DEF_'


class GypPrepareOut(cr.PrepareOut):
  """A prepare action that runs gyp whenever you select an output directory."""

  ENABLED = cr.Config.From(
      GYP_GENERATORS='ninja',
      GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}',
      GYP_DEF_target_arch='{CR_ENVSETUP_ARCH}',
  )

  def Prepare(self):
    # Collapse GYP_DEFINES from all GYP_DEF prefixes
    gyp_defines = cr.context.Find('GYP_DEFINES') or ''
    for key, value in cr.context.exported.items():
      if key.startswith(GYP_DEFINE_PREFIX):
        gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
    cr.context['GYP_DEFINES'] = gyp_defines.strip()
    if cr.context.verbose >= 1:
      print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}')
      print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}')
    cr.Host.Execute(
        '{CR_SRC}/build/gyp_chromium',
        '--depth={CR_SRC}',
        '--check'
    )
```

subject: Fix typo GYP_DEF_target_arch v GPP_DEF_target_arch
message: Fix typo GYP_DEF_target_arch v GPP_DEF_target_arch
BUG=
Review URL: https://codereview.chromium.org/218623005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@260590 0039d316-1c4b-4281-b951-d872f2087c98
lang: Python
license: bsd-3-clause
repos: hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,Just-D/chromium-1,bright-sparks/chromium-spacewalk,jaruba/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Jonekee/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,jaruba/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,jaruba/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,Just-D/chromium-1,Fireblend/chromium-crosswalk,littlstar/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,dednal/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,dednal/chromium.src,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,littlstar/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,Just-D/chromium-1,dednal/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,dushu1203/chromium.src,jaruba/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk
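
The fixed key only matters because of the `GYP_DEF_` collapse in `Prepare`. The same loop can be run in isolation, with a plain dict standing in for `cr.context.exported` (the keys and values below are illustrative):

```python
GYP_DEFINE_PREFIX = 'GYP_DEF_'

# Illustrative stand-in for cr.context.exported.
exported = {'GYP_DEF_target_arch': 'x64', 'GYP_GENERATORS': 'ninja'}

gyp_defines = ''
for key, value in exported.items():
    if key.startswith(GYP_DEFINE_PREFIX):
        # Strip the prefix and emit a space-separated name=value pair.
        gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)

print(gyp_defines.strip())  # -> target_arch=x64
```

With the old GPP_DEF_target_arch spelling, the key never matched the prefix, so target_arch silently dropped out of GYP_DEFINES; the one-character fix restores it.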
class GypPrepareOut(cr.PrepareOut):
"""A prepare action that runs gyp whenever you select an output directory."""
ENABLED = cr.Config.From(
GYP_GENERATORS='ninja',
GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}',
GYP_DEF_target_arch='{CR_ENVSETUP_ARCH}',
)
def Prepare(self):
# Collapse GYP_DEFINES from all GYP_DEF prefixes
gyp_defines = cr.context.Find('GYP_DEFINES') or ''
for key, value in cr.context.exported.items():
if key.startswith(GYP_DEFINE_PREFIX):
gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
cr.context['GYP_DEFINES'] = gyp_defines.strip()
if cr.context.verbose >= 1:
print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}')
print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}')
cr.Host.Execute(
'{CR_SRC}/build/gyp_chromium',
'--depth={CR_SRC}',
'--check'
)
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to add gyp support to cr."""
import cr
import os
GYP_DEFINE_PREFIX = 'GYP_DEF_'
class GypPrepareOut(cr.PrepareOut):
"""A prepare action that runs gyp whenever you select an output directory."""
ENABLED = cr.Config.From(
GYP_GENERATORS='ninja',
GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}',
GPP_DEF_target_arch='{CR_ENVSETUP_ARCH}',
)
def Prepare(self):
# Collapse GYP_DEFINES from all GYP_DEF prefixes
gyp_defines = cr.context.Find('GYP_DEFINES') or ''
for key, value in cr.context.exported.items():
if key.startswith(GYP_DEFINE_PREFIX):
gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
cr.context['GYP_DEFINES'] = gyp_defines.strip()
if cr.context.verbose >= 1:
print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}')
print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}')
cr.Host.Execute(
'{CR_SRC}/build/gyp_chromium',
'--depth={CR_SRC}',
'--check'
)
Fix typo GYP_DEF_target_arch v GPP_DEF_target_arch
BUG=
Review URL: https://codereview.chromium.org/218623005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@260590 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to add gyp support to cr."""
import cr
import os
GYP_DEFINE_PREFIX = 'GYP_DEF_'
class GypPrepareOut(cr.PrepareOut):
"""A prepare action that runs gyp whenever you select an output directory."""
ENABLED = cr.Config.From(
GYP_GENERATORS='ninja',
GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}',
GYP_DEF_target_arch='{CR_ENVSETUP_ARCH}',
)
def Prepare(self):
# Collapse GYP_DEFINES from all GYP_DEF prefixes
gyp_defines = cr.context.Find('GYP_DEFINES') or ''
for key, value in cr.context.exported.items():
if key.startswith(GYP_DEFINE_PREFIX):
gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
cr.context['GYP_DEFINES'] = gyp_defines.strip()
if cr.context.verbose >= 1:
print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}')
print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}')
cr.Host.Execute(
'{CR_SRC}/build/gyp_chromium',
'--depth={CR_SRC}',
'--check'
)
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to add gyp support to cr."""
import cr
import os
GYP_DEFINE_PREFIX = 'GYP_DEF_'
class GypPrepareOut(cr.PrepareOut):
"""A prepare action that runs gyp whenever you select an output directory."""
ENABLED = cr.Config.From(
GYP_GENERATORS='ninja',
GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}',
GPP_DEF_target_arch='{CR_ENVSETUP_ARCH}',
)
def Prepare(self):
# Collapse GYP_DEFINES from all GYP_DEF prefixes
gyp_defines = cr.context.Find('GYP_DEFINES') or ''
for key, value in cr.context.exported.items():
if key.startswith(GYP_DEFINE_PREFIX):
gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
cr.context['GYP_DEFINES'] = gyp_defines.strip()
if cr.context.verbose >= 1:
print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}')
print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}')
cr.Host.Execute(
'{CR_SRC}/build/gyp_chromium',
'--depth={CR_SRC}',
'--check'
)
<commit_msg>Fix typo GYP_DEF_target_arch v GPP_DEF_target_arch
BUG=
Review URL: https://codereview.chromium.org/218623005
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@260590 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to add gyp support to cr."""
import cr
import os
GYP_DEFINE_PREFIX = 'GYP_DEF_'
class GypPrepareOut(cr.PrepareOut):
"""A prepare action that runs gyp whenever you select an output directory."""
ENABLED = cr.Config.From(
GYP_GENERATORS='ninja',
GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}',
GYP_DEF_target_arch='{CR_ENVSETUP_ARCH}',
)
def Prepare(self):
# Collapse GYP_DEFINES from all GYP_DEF prefixes
gyp_defines = cr.context.Find('GYP_DEFINES') or ''
for key, value in cr.context.exported.items():
if key.startswith(GYP_DEFINE_PREFIX):
gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
cr.context['GYP_DEFINES'] = gyp_defines.strip()
if cr.context.verbose >= 1:
print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}')
print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}')
cr.Host.Execute(
'{CR_SRC}/build/gyp_chromium',
'--depth={CR_SRC}',
'--check'
)
|
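A minimal sketch of the prefix-collapsing idea that Prepare() implements above — the function and input names here are illustrative, not part of the real cr framework:

GYP_DEFINE_PREFIX = 'GYP_DEF_'

def collapse_gyp_defines(exported, existing=''):
    # Fold every GYP_DEF_*-prefixed key into a single GYP_DEFINES string.
    gyp_defines = existing
    for key, value in exported.items():
        if key.startswith(GYP_DEFINE_PREFIX):
            gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
    return gyp_defines.strip()

# collapse_gyp_defines({'GYP_DEF_target_arch': 'x64'}) -> 'target_arch=x64'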
01f66f87c52f4ac974876dde9c9fc505193eb16d
|
verify_all.py
|
verify_all.py
|
# Apache License 2.0
import sys
import os
import glob
import verify
class VerifyAll:
def verify_file(self, file, log):
class ErrorCounter:
def __init__(self):
self.count = 0
def error_handler(self, label, msg):
self.count += 1
return False
error_counter = ErrorCounter()
urlctx = verify.UrlCtx('', file, sys.stdin)
ctx = verify.VerifyCtx(urlctx, verify.run, error_counter.error_handler)
if file == '-':
url = file
else:
url = 'file://' + file
ctx.run(urlctx, url, ctx, log)
if error_counter.count != 0:
print('Found errors in {}'.format(file))
return False
else:
return True
def verify(self):
successes = 0
logname = 'verify.log'
with open(logname, 'w') as log:
print('Writing log to {}'.format(logname))
for root, dirs, files in os.walk('.'):
for file in glob.iglob(os.path.join(root, '*.gh')):
log.write('\nVerifying {}\n'.format(file))
if self.verify_file(file, log):
successes += 1
log.write('\nverify_all done\n')
print('{} files successfully verified'.format(successes))
if __name__ == '__main__':
if len(sys.argv) != 1:
print("""
Usage: python verify_all.py
This will verify all .gh files in the current directory and subdirectories.
""")
sys.exit(1)
VerifyAll().verify()
|
Add script to verify all files.
|
Add script to verify all files.
|
Python
|
apache-2.0
|
kryptine/ghilbert,raphlinus/ghilbert,raphlinus/ghilbert,raphlinus/ghilbert,kryptine/ghilbert,raphlinus/ghilbert
|
Add script to verify all files.
|
# Apache License 2.0
import sys
import os
import glob
import verify
class VerifyAll:
def verify_file(self, file, log):
class ErrorCounter:
def __init__(self):
self.count = 0
def error_handler(self, label, msg):
self.count += 1
return False
error_counter = ErrorCounter()
urlctx = verify.UrlCtx('', file, sys.stdin)
ctx = verify.VerifyCtx(urlctx, verify.run, error_counter.error_handler)
if file == '-':
url = file
else:
url = 'file://' + file
ctx.run(urlctx, url, ctx, log)
if error_counter.count != 0:
print('Found errors in {}'.format(file))
return False
else:
return True
def verify(self):
successes = 0
logname = 'verify.log'
with open(logname, 'w') as log:
print('Writing log to {}'.format(logname))
for root, dirs, files in os.walk('.'):
for file in glob.iglob(os.path.join(root, '*.gh')):
log.write('\nVerifying {}\n'.format(file))
if self.verify_file(file, log):
successes += 1
log.write('\nverify_all done\n')
print('{} files successfully verified'.format(successes))
if __name__ == '__main__':
if len(sys.argv) != 1:
print("""
Usage: python verify_all.py
This will verify all .gh files in the current directory and subdirectories.
""")
sys.exit(1)
VerifyAll().verify()
|
<commit_before><commit_msg>Add script to verify all files.<commit_after>
|
# Apache License 2.0
import sys
import os
import glob
import verify
class VerifyAll:
def verify_file(self, file, log):
class ErrorCounter:
def __init__(self):
self.count = 0
def error_handler(self, label, msg):
self.count += 1
return False
error_counter = ErrorCounter()
urlctx = verify.UrlCtx('', file, sys.stdin)
ctx = verify.VerifyCtx(urlctx, verify.run, error_counter.error_handler)
if file == '-':
url = file
else:
url = 'file://' + file
ctx.run(urlctx, url, ctx, log)
if error_counter.count != 0:
print('Found errors in {}'.format(file))
return False
else:
return True
def verify(self):
successes = 0
logname = 'verify.log'
with open(logname, 'w') as log:
print('Writing log to {}'.format(logname))
for root, dirs, files in os.walk('.'):
for file in glob.iglob(os.path.join(root, '*.gh')):
log.write('\nVerifying {}\n'.format(file))
if self.verify_file(file, log):
successes += 1
log.write('\nverify_all done\n')
print('{} files successfully verified'.format(successes))
if __name__ == '__main__':
if len(sys.argv) != 1:
print("""
Usage: python verify_all.py
This will verify all .gh files in the current directory and subdirectories.
""")
sys.exit(1)
VerifyAll().verify()
|
Add script to verify all files.# Apache License 2.0
import sys
import os
import glob
import verify
class VerifyAll:
def verify_file(self, file, log):
class ErrorCounter:
def __init__(self):
self.count = 0
def error_handler(self, label, msg):
self.count += 1
return False
error_counter = ErrorCounter()
urlctx = verify.UrlCtx('', file, sys.stdin)
ctx = verify.VerifyCtx(urlctx, verify.run, error_counter.error_handler)
if file == '-':
url = file
else:
url = 'file://' + file
ctx.run(urlctx, url, ctx, log)
if error_counter.count != 0:
print('Found errors in {}'.format(file))
return False
else:
return True
def verify(self):
successes = 0
logname = 'verify.log'
with open(logname, 'w') as log:
print('Writing log to {}'.format(logname))
for root, dirs, files in os.walk('.'):
for file in glob.iglob(os.path.join(root, '*.gh')):
log.write('\nVerifying {}\n'.format(file))
if self.verify_file(file, log):
successes += 1
log.write('\nverify_all done\n')
print('{} files successfully verified'.format(successes))
if __name__ == '__main__':
if len(sys.argv) != 1:
print("""
Usage: python verify_all.py
This will verify all .gh files in the current directory and subdirectories.
""")
sys.exit(1)
VerifyAll().verify()
|
<commit_before><commit_msg>Add script to verify all files.<commit_after># Apache License 2.0
import sys
import os
import glob
import verify
class VerifyAll:
def verify_file(self, file, log):
class ErrorCounter:
def __init__(self):
self.count = 0
def error_handler(self, label, msg):
self.count += 1
return False
error_counter = ErrorCounter()
urlctx = verify.UrlCtx('', file, sys.stdin)
ctx = verify.VerifyCtx(urlctx, verify.run, error_counter.error_handler)
if file == '-':
url = file
else:
url = 'file://' + file
ctx.run(urlctx, url, ctx, log)
if error_counter.count != 0:
print('Found errors in {}'.format(file))
return False
else:
return True
def verify(self):
successes = 0
logname = 'verify.log'
with open(logname, 'w') as log:
print('Writing log to {}'.format(logname))
for root, dirs, files in os.walk('.'):
for file in glob.iglob(os.path.join(root, '*.gh')):
log.write('\nVerifying {}\n'.format(file))
if self.verify_file(file, log):
successes += 1
log.write('\nverify_all done\n')
print('{} files successfully verified'.format(successes))
if __name__ == '__main__':
if len(sys.argv) != 1:
print("""
Usage: python verify_all.py
This will verify all .gh files in the current directory and subdirectories.
""")
sys.exit(1)
VerifyAll().verify()
|
|
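A small sketch of the per-directory file discovery that verify() uses above, isolated so it can be run on its own (the '*.gh' pattern and top directory come from the script; nothing else is assumed):

import glob
import os

def find_gh_files(top='.'):
    # os.walk visits every directory; glob.iglob then matches '*.gh' per directory.
    for root, _dirs, _files in os.walk(top):
        for path in glob.iglob(os.path.join(root, '*.gh')):
            yield path

# for path in find_gh_files(): print(path)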
5dd02067e2529a5988dec3dda6f5c14e50bfa890
|
tests/overrides_test.py
|
tests/overrides_test.py
|
import unittest
from gi.repository import BlockDev
class OverridesTestCase(unittest.TestCase):
def test_error_proxy(self):
"""Verify that the error proxy works as expected"""
        # calls via the error proxy have to be done as
# e.g. BlockDev.swap.swapon() instead of BlockDev.swap_swapon(), since
# BlockDev.swap is an ErrorProxy instance and BlockDev.swap_swapon() is
# the function it calls
# test that exceptions are correctly transformed
try:
# no need to specify priority since we are using overrides that
# define the default value for the parameter (-1)
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
# we caught the generic error, now let's test that it is also the
# fine-grained one
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that a second call like that works the same (should go from the cache)
try:
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that successful calls propagate the results
self.assertTrue(BlockDev.lvm.is_supported_pe_size(4 * 1024))
self.assertEqual(BlockDev.lvm.round_size_to_pe(11 * 1024**2, 4 * 1024**2, True), 12 * 1024**2)
|
Add the first small test for the ErrorProxy
|
Add the first small test for the ErrorProxy
Others should follow later, but let's start with this most simple one that
verifies the very basic functionality.
|
Python
|
lgpl-2.1
|
vpodzime/libblockdev,atodorov/libblockdev,vpodzime/libblockdev,vpodzime/libblockdev,atodorov/libblockdev,atodorov/libblockdev,rhinstaller/libblockdev,rhinstaller/libblockdev,rhinstaller/libblockdev
|
Add the first small test for the ErrorProxy
Others should follow later, but let's start with this most simple one that
verifies the very basic functionality.
|
import unittest
from gi.repository import BlockDev
class OverridesTestCase(unittest.TestCase):
def test_error_proxy(self):
"""Verify that the error proxy works as expected"""
        # calls via the error proxy have to be done as
# e.g. BlockDev.swap.swapon() instead of BlockDev.swap_swapon(), since
# BlockDev.swap is an ErrorProxy instance and BlockDev.swap_swapon() is
# the function it calls
# test that exceptions are correctly transformed
try:
# no need to specify priority since we are using overrides that
# define the default value for the parameter (-1)
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
# we caught the generic error, now let's test that it is also the
# fine-grained one
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that a second call like that works the same (should go from the cache)
try:
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that successful calls propagate the results
self.assertTrue(BlockDev.lvm.is_supported_pe_size(4 * 1024))
self.assertEqual(BlockDev.lvm.round_size_to_pe(11 * 1024**2, 4 * 1024**2, True), 12 * 1024**2)
|
<commit_before><commit_msg>Add the first small test for the ErrorProxy
Others should follow later, but let's start with this most simple one that
verifies the very basic functionality.<commit_after>
|
import unittest
from gi.repository import BlockDev
class OverridesTestCase(unittest.TestCase):
def test_error_proxy(self):
"""Verify that the error proxy works as expected"""
        # calls via the error proxy have to be done as
# e.g. BlockDev.swap.swapon() instead of BlockDev.swap_swapon(), since
# BlockDev.swap is an ErrorProxy instance and BlockDev.swap_swapon() is
# the function it calls
# test that exceptions are correctly transformed
try:
# no need to specify priority since we are using overrides that
# define the default value for the parameter (-1)
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
# we caught the generic error, now let's test that it is also the
# fine-grained one
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that a second call like that works the same (should go from the cache)
try:
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that successful calls propagate the results
self.assertTrue(BlockDev.lvm.is_supported_pe_size(4 * 1024))
self.assertEqual(BlockDev.lvm.round_size_to_pe(11 * 1024**2, 4 * 1024**2, True), 12 * 1024**2)
|
Add the first small test for the ErrorProxy
Others should follow later, but let's start with this most simple one that
verifies the very basic functionality.import unittest
from gi.repository import BlockDev
class OverridesTestCase(unittest.TestCase):
def test_error_proxy(self):
"""Verify that the error proxy works as expected"""
        # calls via the error proxy have to be done as
# e.g. BlockDev.swap.swapon() instead of BlockDev.swap_swapon(), since
# BlockDev.swap is an ErrorProxy instance and BlockDev.swap_swapon() is
# the function it calls
# test that exceptions are correctly transformed
try:
# no need to specify priority since we are using overrides that
# define the default value for the parameter (-1)
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
# we caught the generic error, now let's test that it is also the
# fine-grained one
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that a second call like that works the same (should go from the cache)
try:
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that successful calls propagate the results
self.assertTrue(BlockDev.lvm.is_supported_pe_size(4 * 1024))
self.assertEqual(BlockDev.lvm.round_size_to_pe(11 * 1024**2, 4 * 1024**2, True), 12 * 1024**2)
|
<commit_before><commit_msg>Add the first small test for the ErrorProxy
Others should follow later, but let's start with this most simple one that
verifies the very basic functionality.<commit_after>import unittest
from gi.repository import BlockDev
class OverridesTestCase(unittest.TestCase):
def test_error_proxy(self):
"""Verify that the error proxy works as expected"""
        # calls via the error proxy have to be done as
# e.g. BlockDev.swap.swapon() instead of BlockDev.swap_swapon(), since
# BlockDev.swap is an ErrorProxy instance and BlockDev.swap_swapon() is
# the function it calls
# test that exceptions are correctly transformed
try:
# no need to specify priority since we are using overrides that
# define the default value for the parameter (-1)
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
# we caught the generic error, now let's test that it is also the
# fine-grained one
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that a second call like that works the same (should go from the cache)
try:
BlockDev.swap.swapon("/non/existing")
except BlockDev.BlockDevError as e:
self.assertTrue(isinstance(e, BlockDev.SwapError))
# test that successful calls propagate the results
self.assertTrue(BlockDev.lvm.is_supported_pe_size(4 * 1024))
self.assertEqual(BlockDev.lvm.round_size_to_pe(11 * 1024**2, 4 * 1024**2, True), 12 * 1024**2)
|
|
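The test above hinges on the fine-grained error subclassing the generic one. A self-contained sketch of that relationship — class names mirror the test but are illustrative stand-ins, not libblockdev's actual definitions:

class BlockDevError(Exception):
    pass

class SwapError(BlockDevError):
    pass

try:
    raise SwapError("swapon failed")
except BlockDevError as e:
    # Caught via the generic type, yet still the fine-grained one.
    assert isinstance(e, SwapError)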
3c47e583d82911967103f13148f6ece78ded09bb
|
python/ql/test/library-tests/frameworks/aiohttp/app_conf_test.py
|
python/ql/test/library-tests/frameworks/aiohttp/app_conf_test.py
|
"""
This file is a test of an extra data-flow step that we want to have for
aiohttp.web.Application
We don't really have an established way to test extra data-flow steps in external
libraries right now, so for now I've just used our normal taint-flow testing ¯\_(ツ)_/¯
see https://docs.aiohttp.org/en/stable/web_advanced.html#application-s-config
"""
from aiohttp import web
# to make code runnable
TAINTED_STRING = "TAINTED_STRING"
def ensure_tainted(*args, **kwargs):
pass
ensure_tainted(
TAINTED_STRING # $ tainted
)
async def example(request: web.Request): # $ requestHandler
return web.Response(text=f'example {request.app["foo"]=}') # $ HttpResponse
async def also_works(request: web.Request): # $ requestHandler
return web.Response(text=f'also_works {request.config_dict["foo"]=}') # $ HttpResponse
async def taint_test(request: web.Request): # $ requestHandler
ensure_tainted(
request.app["ts"], # $ MISSING: tainted
request.config_dict["ts"], # $ MISSING: tainted
)
return web.Response(text="ok") # $ HttpResponse
app = web.Application()
app.router.add_get("", example) # $ routeSetup=""
app.router.add_get("/also-works", also_works) # $ routeSetup="/also-works"
app.router.add_get("/taint-test", taint_test) # $ routeSetup="/taint-test"
app["foo"] = 42
app["ts"] = TAINTED_STRING
if __name__ == "__main__":
web.run_app(app)
|
Add test for missing data-flow step in aiohttp.web
|
Python: Add test for missing data-flow step in aiohttp.web
|
Python
|
mit
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
Python: Add test for missing data-flow step in aiohttp.web
|
"""
This file is a test of an extra data-flow step that we want to have for
aiohttp.web.Application
We don't really have an established way to test extra data-flow steps in external
libraries right now, so for now I've just used our normal taint-flow testing ¯\_(ツ)_/¯
see https://docs.aiohttp.org/en/stable/web_advanced.html#application-s-config
"""
from aiohttp import web
# to make code runnable
TAINTED_STRING = "TAINTED_STRING"
def ensure_tainted(*args, **kwargs):
pass
ensure_tainted(
TAINTED_STRING # $ tainted
)
async def example(request: web.Request): # $ requestHandler
return web.Response(text=f'example {request.app["foo"]=}') # $ HttpResponse
async def also_works(request: web.Request): # $ requestHandler
return web.Response(text=f'also_works {request.config_dict["foo"]=}') # $ HttpResponse
async def taint_test(request: web.Request): # $ requestHandler
ensure_tainted(
request.app["ts"], # $ MISSING: tainted
request.config_dict["ts"], # $ MISSING: tainted
)
return web.Response(text="ok") # $ HttpResponse
app = web.Application()
app.router.add_get("", example) # $ routeSetup=""
app.router.add_get("/also-works", also_works) # $ routeSetup="/also-works"
app.router.add_get("/taint-test", taint_test) # $ routeSetup="/taint-test"
app["foo"] = 42
app["ts"] = TAINTED_STRING
if __name__ == "__main__":
web.run_app(app)
|
<commit_before><commit_msg>Python: Add test for missing data-flow step in aiohttp.web<commit_after>
|
"""
This file is a test of an extra data-flow step that we want to have for
aiohttp.web.Application
We don't really have an established way to test extra data-flow steps in external
libraries right now, so for now I've just used our normal taint-flow testing ¯\_(ツ)_/¯
see https://docs.aiohttp.org/en/stable/web_advanced.html#application-s-config
"""
from aiohttp import web
# to make code runnable
TAINTED_STRING = "TAINTED_STRING"
def ensure_tainted(*args, **kwargs):
pass
ensure_tainted(
TAINTED_STRING # $ tainted
)
async def example(request: web.Request): # $ requestHandler
return web.Response(text=f'example {request.app["foo"]=}') # $ HttpResponse
async def also_works(request: web.Request): # $ requestHandler
return web.Response(text=f'also_works {request.config_dict["foo"]=}') # $ HttpResponse
async def taint_test(request: web.Request): # $ requestHandler
ensure_tainted(
request.app["ts"], # $ MISSING: tainted
request.config_dict["ts"], # $ MISSING: tainted
)
return web.Response(text="ok") # $ HttpResponse
app = web.Application()
app.router.add_get("", example) # $ routeSetup=""
app.router.add_get("/also-works", also_works) # $ routeSetup="/also-works"
app.router.add_get("/taint-test", taint_test) # $ routeSetup="/taint-test"
app["foo"] = 42
app["ts"] = TAINTED_STRING
if __name__ == "__main__":
web.run_app(app)
|
Python: Add test for missing data-flow step in aiohttp.web"""
This file is a test of an extra data-flow step that we want to have for
aiohttp.web.Application
We don't really have an established way to test extra data-flow steps in external
libraries right now, so for now I've just used our normal taint-flow testing ¯\_(ツ)_/¯
see https://docs.aiohttp.org/en/stable/web_advanced.html#application-s-config
"""
from aiohttp import web
# to make code runnable
TAINTED_STRING = "TAINTED_STRING"
def ensure_tainted(*args, **kwargs):
pass
ensure_tainted(
TAINTED_STRING # $ tainted
)
async def example(request: web.Request): # $ requestHandler
return web.Response(text=f'example {request.app["foo"]=}') # $ HttpResponse
async def also_works(request: web.Request): # $ requestHandler
return web.Response(text=f'also_works {request.config_dict["foo"]=}') # $ HttpResponse
async def taint_test(request: web.Request): # $ requestHandler
ensure_tainted(
request.app["ts"], # $ MISSING: tainted
request.config_dict["ts"], # $ MISSING: tainted
)
return web.Response(text="ok") # $ HttpResponse
app = web.Application()
app.router.add_get("", example) # $ routeSetup=""
app.router.add_get("/also-works", also_works) # $ routeSetup="/also-works"
app.router.add_get("/taint-test", taint_test) # $ routeSetup="/taint-test"
app["foo"] = 42
app["ts"] = TAINTED_STRING
if __name__ == "__main__":
web.run_app(app)
|
<commit_before><commit_msg>Python: Add test for missing data-flow step in aiohttp.web<commit_after>"""
This file is a test of an extra data-flow step that we want to have for
aiohttp.web.Application
We don't really have an established way to test extra data-flow steps in external
libraries right now, so for now I've just used our normal taint-flow testing ¯\_(ツ)_/¯
see https://docs.aiohttp.org/en/stable/web_advanced.html#application-s-config
"""
from aiohttp import web
# to make code runnable
TAINTED_STRING = "TAINTED_STRING"
def ensure_tainted(*args, **kwargs):
pass
ensure_tainted(
TAINTED_STRING # $ tainted
)
async def example(request: web.Request): # $ requestHandler
return web.Response(text=f'example {request.app["foo"]=}') # $ HttpResponse
async def also_works(request: web.Request): # $ requestHandler
return web.Response(text=f'also_works {request.config_dict["foo"]=}') # $ HttpResponse
async def taint_test(request: web.Request): # $ requestHandler
ensure_tainted(
request.app["ts"], # $ MISSING: tainted
request.config_dict["ts"], # $ MISSING: tainted
)
return web.Response(text="ok") # $ HttpResponse
app = web.Application()
app.router.add_get("", example) # $ routeSetup=""
app.router.add_get("/also-works", also_works) # $ routeSetup="/also-works"
app.router.add_get("/taint-test", taint_test) # $ routeSetup="/taint-test"
app["foo"] = 42
app["ts"] = TAINTED_STRING
if __name__ == "__main__":
web.run_app(app)
|
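request.config_dict matters once sub-applications are involved: it chains from the handler's app up to its parents, while request.app sees only the app the handler was registered on. A minimal sketch using aiohttp's documented sub-application API (handler and keys are illustrative):

from aiohttp import web

async def handler(request):
    # "foo" lives on the parent app; config_dict chains up to it.
    return web.Response(text=str(request.config_dict["foo"]))

parent = web.Application()
parent["foo"] = 42
sub = web.Application()
sub.router.add_get("/x", handler)
parent.add_subapp("/sub", sub)  # handler is now served at /sub/x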
|
c6a5b1b54eeb15c173a59d42a2cb7dbfac81b982
|
Sketches/JL/IRC/plainPython/parser.py
|
Sketches/JL/IRC/plainPython/parser.py
|
#! /usr/bin/env python
def parseRpl(segment):
after_number = segment.find('</a>')
number = segment[after_number-3:after_number]
at_name = segment.find('</td><td class="t">') + len('</td><td class="t">')
after_name = segment.find('</td>', at_name)
if segment[at_name:at_name + 7] == '<a href':
after_name = segment.find('</a>', after_number + 1)
at_name = segment.find('>', at_name) + 1
name = segment[at_name:after_name]
name = name.rstrip()
at_format = segment.find('</td><td class="t">', after_name) + len('</td><td class="t">')
after_format = segment.find('\n', at_format)
format = segment[at_format: after_format]
    format = format.replace('&gt;', '>')
    format = format.replace('&lt;', '<')
at_comments = segment.find('</td><td>', after_format) + len('</td><td>')
comments = segment[at_comments:]
return number, name, format, comments
def appendParsed(lines, number, name, format, comments):
tab = ' '
if len(lines) > 0:
lastNumber = lines[-3][0:2]
if number == lastNumber:
return
lines.append(number + tab + name + tab + '"'+format+'"' + "\n")
lines.append( '### ' + comments)
lines.append("\n")
read = open("/home/jlei/files/irc2numerics.html", "r")
write = open("/home/jlei/files/numeric_replies.txt", "w")
text = read.read()
read.close()
text = text.split('</td></tr>')
write.write("###courtesy of pickle@alient.net.au, at http://www.alien.net.au/irc/irc2numerics.html\n\n")
lines = []
for segment in text:
number, name, format, comments = parseRpl(segment)
appendParsed(lines, number, name, format, comments)
write.writelines(lines)
|
Copy of the script used to make the reference IRC numeric_replies file
|
Copy of the script used to make the reference IRC numeric_replies file
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
Copy of the script used to make the reference IRC numeric_replies file
|
#! /usr/bin/env python
def parseRpl(segment):
after_number = segment.find('</a>')
number = segment[after_number-3:after_number]
at_name = segment.find('</td><td class="t">') + len('</td><td class="t">')
after_name = segment.find('</td>', at_name)
if segment[at_name:at_name + 7] == '<a href':
after_name = segment.find('</a>', after_number + 1)
at_name = segment.find('>', at_name) + 1
name = segment[at_name:after_name]
name = name.rstrip()
at_format = segment.find('</td><td class="t">', after_name) + len('</td><td class="t">')
after_format = segment.find('\n', at_format)
format = segment[at_format: after_format]
    format = format.replace('&gt;', '>')
    format = format.replace('&lt;', '<')
at_comments = segment.find('</td><td>', after_format) + len('</td><td>')
comments = segment[at_comments:]
return number, name, format, comments
def appendParsed(lines, number, name, format, comments):
tab = ' '
if len(lines) > 0:
lastNumber = lines[-3][0:2]
if number == lastNumber:
return
lines.append(number + tab + name + tab + '"'+format+'"' + "\n")
lines.append( '### ' + comments)
lines.append("\n")
read = open("/home/jlei/files/irc2numerics.html", "r")
write = open("/home/jlei/files/numeric_replies.txt", "w")
text = read.read()
read.close()
text = text.split('</td></tr>')
write.write("###courtesy of pickle@alient.net.au, at http://www.alien.net.au/irc/irc2numerics.html\n\n")
lines = []
for segment in text:
number, name, format, comments = parseRpl(segment)
appendParsed(lines, number, name, format, comments)
write.writelines(lines)
|
<commit_before><commit_msg>Copy of the script used to make the reference IRC numeric_replies file<commit_after>
|
#! /usr/bin/env python
def parseRpl(segment):
after_number = segment.find('</a>')
number = segment[after_number-3:after_number]
at_name = segment.find('</td><td class="t">') + len('</td><td class="t">')
after_name = segment.find('</td>', at_name)
if segment[at_name:at_name + 7] == '<a href':
after_name = segment.find('</a>', after_number + 1)
at_name = segment.find('>', at_name) + 1
name = segment[at_name:after_name]
name = name.rstrip()
at_format = segment.find('</td><td class="t">', after_name) + len('</td><td class="t">')
after_format = segment.find('\n', at_format)
format = segment[at_format: after_format]
    format = format.replace('&gt;', '>')
    format = format.replace('&lt;', '<')
at_comments = segment.find('</td><td>', after_format) + len('</td><td>')
comments = segment[at_comments:]
return number, name, format, comments
def appendParsed(lines, number, name, format, comments):
tab = ' '
if len(lines) > 0:
lastNumber = lines[-3][0:2]
if number == lastNumber:
return
lines.append(number + tab + name + tab + '"'+format+'"' + "\n")
lines.append( '### ' + comments)
lines.append("\n")
read = open("/home/jlei/files/irc2numerics.html", "r")
write = open("/home/jlei/files/numeric_replies.txt", "w")
text = read.read()
read.close()
text = text.split('</td></tr>')
write.write("###courtesy of pickle@alient.net.au, at http://www.alien.net.au/irc/irc2numerics.html\n\n")
lines = []
for segment in text:
number, name, format, comments = parseRpl(segment)
appendParsed(lines, number, name, format, comments)
write.writelines(lines)
|
Copy of the script used to make the reference IRC numeric_replies file#! /usr/bin/env python
def parseRpl(segment):
after_number = segment.find('</a>')
number = segment[after_number-3:after_number]
at_name = segment.find('</td><td class="t">') + len('</td><td class="t">')
after_name = segment.find('</td>', at_name)
if segment[at_name:at_name + 7] == '<a href':
after_name = segment.find('</a>', after_number + 1)
at_name = segment.find('>', at_name) + 1
name = segment[at_name:after_name]
name = name.rstrip()
at_format = segment.find('</td><td class="t">', after_name) + len('</td><td class="t">')
after_format = segment.find('\n', at_format)
format = segment[at_format: after_format]
    format = format.replace('&gt;', '>')
    format = format.replace('&lt;', '<')
at_comments = segment.find('</td><td>', after_format) + len('</td><td>')
comments = segment[at_comments:]
return number, name, format, comments
def appendParsed(lines, number, name, format, comments):
tab = ' '
if len(lines) > 0:
lastNumber = lines[-3][0:2]
if number == lastNumber:
return
lines.append(number + tab + name + tab + '"'+format+'"' + "\n")
lines.append( '### ' + comments)
lines.append("\n")
read = open("/home/jlei/files/irc2numerics.html", "r")
write = open("/home/jlei/files/numeric_replies.txt", "w")
text = read.read()
read.close()
text = text.split('</td></tr>')
write.write("###courtesy of pickle@alient.net.au, at http://www.alien.net.au/irc/irc2numerics.html\n\n")
lines = []
for segment in text:
number, name, format, comments = parseRpl(segment)
appendParsed(lines, number, name, format, comments)
write.writelines(lines)
|
<commit_before><commit_msg>Copy of the script used to make the reference IRC numeric_replies file<commit_after>#! /usr/bin/env python
def parseRpl(segment):
after_number = segment.find('</a>')
number = segment[after_number-3:after_number]
at_name = segment.find('</td><td class="t">') + len('</td><td class="t">')
after_name = segment.find('</td>', at_name)
if segment[at_name:at_name + 7] == '<a href':
after_name = segment.find('</a>', after_number + 1)
at_name = segment.find('>', at_name) + 1
name = segment[at_name:after_name]
name = name.rstrip()
at_format = segment.find('</td><td class="t">', after_name) + len('</td><td class="t">')
after_format = segment.find('\n', at_format)
format = segment[at_format: after_format]
    format = format.replace('&gt;', '>')
    format = format.replace('&lt;', '<')
at_comments = segment.find('</td><td>', after_format) + len('</td><td>')
comments = segment[at_comments:]
return number, name, format, comments
def appendParsed(lines, number, name, format, comments):
tab = ' '
if len(lines) > 0:
lastNumber = lines[-3][0:2]
if number == lastNumber:
return
lines.append(number + tab + name + tab + '"'+format+'"' + "\n")
lines.append( '### ' + comments)
lines.append("\n")
read = open("/home/jlei/files/irc2numerics.html", "r")
write = open("/home/jlei/files/numeric_replies.txt", "w")
text = read.read()
read.close()
text = text.split('</td></tr>')
write.write("###courtesy of pickle@alient.net.au, at http://www.alien.net.au/irc/irc2numerics.html\n\n")
lines = []
for segment in text:
number, name, format, comments = parseRpl(segment)
appendParsed(lines, number, name, format, comments)
write.writelines(lines)
|
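The two replace() calls in parseRpl un-escape HTML entities by hand; the standard library's html.unescape does the same more generally. A quick sketch on a made-up fragment, not the real irc2numerics.html markup:

import html

fragment = '&lt;nick&gt; :Nickname is already in use'
print(html.unescape(fragment))  # <nick> :Nickname is already in use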
|
8d31f3b6a573759546e7dbc694d9f63647b0cc43
|
photutils/segmentation/tests/test_finder.py
|
photutils/segmentation/tests/test_finder.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the finder module.
"""
from astropy.convolution import Gaussian2DKernel, convolve
from astropy.stats import gaussian_fwhm_to_sigma
import pytest
from ..finder import SourceFinder
from ...datasets import make_100gaussians_image
from ...utils.exceptions import NoDetectionsWarning
from ...utils._optional_deps import HAS_SCIPY, HAS_SKIMAGE # noqa
class TestSourceFinder:
data = make_100gaussians_image() - 5.0 # subtract background
sigma = 3. * gaussian_fwhm_to_sigma # FWHM = 3.
kernel = Gaussian2DKernel(sigma, x_size=5, y_size=5)
convolved_data = convolve(data, kernel, normalize_kernel=True)
threshold = 1.5 * 2.0
npixels = 10
def test_deblend(self):
finder = SourceFinder(npixels=self.npixels)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 94
def test_no_deblend(self):
finder = SourceFinder(npixels=self.npixels, deblend=False)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 86
def test_no_sources(self):
finder = SourceFinder(npixels=self.npixels, deblend=True)
with pytest.warns(NoDetectionsWarning):
segm = finder(self.convolved_data, 1000)
assert segm is None
|
Add unit tests for SourceFinder
|
Add unit tests for SourceFinder
|
Python
|
bsd-3-clause
|
astropy/photutils,larrybradley/photutils
|
Add unit tests for SourceFinder
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the finder module.
"""
from astropy.convolution import Gaussian2DKernel, convolve
from astropy.stats import gaussian_fwhm_to_sigma
import pytest
from ..finder import SourceFinder
from ...datasets import make_100gaussians_image
from ...utils.exceptions import NoDetectionsWarning
from ...utils._optional_deps import HAS_SCIPY, HAS_SKIMAGE # noqa
class TestSourceFinder:
data = make_100gaussians_image() - 5.0 # subtract background
sigma = 3. * gaussian_fwhm_to_sigma # FWHM = 3.
kernel = Gaussian2DKernel(sigma, x_size=5, y_size=5)
convolved_data = convolve(data, kernel, normalize_kernel=True)
threshold = 1.5 * 2.0
npixels = 10
def test_deblend(self):
finder = SourceFinder(npixels=self.npixels)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 94
def test_no_deblend(self):
finder = SourceFinder(npixels=self.npixels, deblend=False)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 86
def test_no_sources(self):
finder = SourceFinder(npixels=self.npixels, deblend=True)
with pytest.warns(NoDetectionsWarning):
segm = finder(self.convolved_data, 1000)
assert segm is None
|
<commit_before><commit_msg>Add unit tests for SourceFinder<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the finder module.
"""
from astropy.convolution import Gaussian2DKernel, convolve
from astropy.stats import gaussian_fwhm_to_sigma
import pytest
from ..finder import SourceFinder
from ...datasets import make_100gaussians_image
from ...utils.exceptions import NoDetectionsWarning
from ...utils._optional_deps import HAS_SCIPY, HAS_SKIMAGE # noqa
class TestSourceFinder:
data = make_100gaussians_image() - 5.0 # subtract background
sigma = 3. * gaussian_fwhm_to_sigma # FWHM = 3.
kernel = Gaussian2DKernel(sigma, x_size=5, y_size=5)
convolved_data = convolve(data, kernel, normalize_kernel=True)
threshold = 1.5 * 2.0
npixels = 10
def test_deblend(self):
finder = SourceFinder(npixels=self.npixels)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 94
def test_no_deblend(self):
finder = SourceFinder(npixels=self.npixels, deblend=False)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 86
def test_no_sources(self):
finder = SourceFinder(npixels=self.npixels, deblend=True)
with pytest.warns(NoDetectionsWarning):
segm = finder(self.convolved_data, 1000)
assert segm is None
|
Add unit tests for SourceFinder# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the finder module.
"""
from astropy.convolution import Gaussian2DKernel, convolve
from astropy.stats import gaussian_fwhm_to_sigma
import pytest
from ..finder import SourceFinder
from ...datasets import make_100gaussians_image
from ...utils.exceptions import NoDetectionsWarning
from ...utils._optional_deps import HAS_SCIPY, HAS_SKIMAGE # noqa
class TestSourceFinder:
data = make_100gaussians_image() - 5.0 # subtract background
sigma = 3. * gaussian_fwhm_to_sigma # FWHM = 3.
kernel = Gaussian2DKernel(sigma, x_size=5, y_size=5)
convolved_data = convolve(data, kernel, normalize_kernel=True)
threshold = 1.5 * 2.0
npixels = 10
def test_deblend(self):
finder = SourceFinder(npixels=self.npixels)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 94
def test_no_deblend(self):
finder = SourceFinder(npixels=self.npixels, deblend=False)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 86
def test_no_sources(self):
finder = SourceFinder(npixels=self.npixels, deblend=True)
with pytest.warns(NoDetectionsWarning):
segm = finder(self.convolved_data, 1000)
assert segm is None
|
<commit_before><commit_msg>Add unit tests for SourceFinder<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the finder module.
"""
from astropy.convolution import Gaussian2DKernel, convolve
from astropy.stats import gaussian_fwhm_to_sigma
import pytest
from ..finder import SourceFinder
from ...datasets import make_100gaussians_image
from ...utils.exceptions import NoDetectionsWarning
from ...utils._optional_deps import HAS_SCIPY, HAS_SKIMAGE # noqa
class TestSourceFinder:
data = make_100gaussians_image() - 5.0 # subtract background
sigma = 3. * gaussian_fwhm_to_sigma # FWHM = 3.
kernel = Gaussian2DKernel(sigma, x_size=5, y_size=5)
convolved_data = convolve(data, kernel, normalize_kernel=True)
threshold = 1.5 * 2.0
npixels = 10
def test_deblend(self):
finder = SourceFinder(npixels=self.npixels)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 94
def test_no_deblend(self):
finder = SourceFinder(npixels=self.npixels, deblend=False)
segm = finder(self.convolved_data, self.threshold)
assert segm.nlabels == 86
def test_no_sources(self):
finder = SourceFinder(npixels=self.npixels, deblend=True)
with pytest.warns(NoDetectionsWarning):
segm = finder(self.convolved_data, 1000)
assert segm is None
|
|
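The fixture converts a FWHM of 3 pixels to a Gaussian sigma. For a Gaussian, FWHM = 2*sqrt(2*ln 2)*sigma, so gaussian_fwhm_to_sigma is the reciprocal of that factor — a quick check using only the standard library:

import math

gaussian_fwhm_to_sigma = 1.0 / (2.0 * math.sqrt(2.0 * math.log(2.0)))
print(round(gaussian_fwhm_to_sigma, 6))  # 0.424661
print(3.0 * gaussian_fwhm_to_sigma)      # sigma for a FWHM of 3 pixels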
1753d0b8d04f479ee52673a58863bdc01d3a1df6
|
runners/tests/test_models.py
|
runners/tests/test_models.py
|
from django.test import TestCase
from runners.models import RunnerVersion, Runner
class TestRunnerVersions(TestCase):
def setUp(self):
self.runner = Runner.objects.create(name='wine')
def test_versions_are_ordered_correctly(self):
RunnerVersion.objects.create(runner=self.runner, version='1.9.14')
RunnerVersion.objects.create(runner=self.runner, version='1.9.4')
RunnerVersion.objects.create(runner=self.runner, version='1.9.1')
RunnerVersion.objects.create(runner=self.runner, version='1.8')
RunnerVersion.objects.create(runner=self.runner, version='1.7')
RunnerVersion.objects.create(runner=self.runner, version='1.7.50')
versions = self.runner.versions
self.assertEqual(versions[0].version, '1.7')
self.assertEqual(versions[1].version, '1.7.50')
self.assertEqual(versions[2].version, '1.8')
self.assertEqual(versions[3].version, '1.9.1')
self.assertEqual(versions[4].version, '1.9.4')
self.assertEqual(versions[5].version, '1.9.14')
|
Add tests for version sorting
|
Add tests for version sorting
|
Python
|
agpl-3.0
|
Turupawn/website,Turupawn/website,lutris/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website
|
Add tests for version sorting
|
from django.test import TestCase
from runners.models import RunnerVersion, Runner
class TestRunnerVersions(TestCase):
def setUp(self):
self.runner = Runner.objects.create(name='wine')
def test_versions_are_ordered_correctly(self):
RunnerVersion.objects.create(runner=self.runner, version='1.9.14')
RunnerVersion.objects.create(runner=self.runner, version='1.9.4')
RunnerVersion.objects.create(runner=self.runner, version='1.9.1')
RunnerVersion.objects.create(runner=self.runner, version='1.8')
RunnerVersion.objects.create(runner=self.runner, version='1.7')
RunnerVersion.objects.create(runner=self.runner, version='1.7.50')
versions = self.runner.versions
self.assertEqual(versions[0].version, '1.7')
self.assertEqual(versions[1].version, '1.7.50')
self.assertEqual(versions[2].version, '1.8')
self.assertEqual(versions[3].version, '1.9.1')
self.assertEqual(versions[4].version, '1.9.4')
self.assertEqual(versions[5].version, '1.9.14')
|
<commit_before><commit_msg>Add tests for version sorting<commit_after>
|
from django.test import TestCase
from runners.models import RunnerVersion, Runner
class TestRunnerVersions(TestCase):
def setUp(self):
self.runner = Runner.objects.create(name='wine')
def test_versions_are_ordered_correctly(self):
RunnerVersion.objects.create(runner=self.runner, version='1.9.14')
RunnerVersion.objects.create(runner=self.runner, version='1.9.4')
RunnerVersion.objects.create(runner=self.runner, version='1.9.1')
RunnerVersion.objects.create(runner=self.runner, version='1.8')
RunnerVersion.objects.create(runner=self.runner, version='1.7')
RunnerVersion.objects.create(runner=self.runner, version='1.7.50')
versions = self.runner.versions
self.assertEqual(versions[0].version, '1.7')
self.assertEqual(versions[1].version, '1.7.50')
self.assertEqual(versions[2].version, '1.8')
self.assertEqual(versions[3].version, '1.9.1')
self.assertEqual(versions[4].version, '1.9.4')
self.assertEqual(versions[5].version, '1.9.14')
|
Add tests for version sortingfrom django.test import TestCase
from runners.models import RunnerVersion, Runner
class TestRunnerVersions(TestCase):
def setUp(self):
self.runner = Runner.objects.create(name='wine')
def test_versions_are_ordered_correctly(self):
RunnerVersion.objects.create(runner=self.runner, version='1.9.14')
RunnerVersion.objects.create(runner=self.runner, version='1.9.4')
RunnerVersion.objects.create(runner=self.runner, version='1.9.1')
RunnerVersion.objects.create(runner=self.runner, version='1.8')
RunnerVersion.objects.create(runner=self.runner, version='1.7')
RunnerVersion.objects.create(runner=self.runner, version='1.7.50')
versions = self.runner.versions
self.assertEqual(versions[0].version, '1.7')
self.assertEqual(versions[1].version, '1.7.50')
self.assertEqual(versions[2].version, '1.8')
self.assertEqual(versions[3].version, '1.9.1')
self.assertEqual(versions[4].version, '1.9.4')
self.assertEqual(versions[5].version, '1.9.14')
|
<commit_before><commit_msg>Add tests for version sorting<commit_after>from django.test import TestCase
from runners.models import RunnerVersion, Runner
class TestRunnerVersions(TestCase):
def setUp(self):
self.runner = Runner.objects.create(name='wine')
def test_versions_are_ordered_correctly(self):
RunnerVersion.objects.create(runner=self.runner, version='1.9.14')
RunnerVersion.objects.create(runner=self.runner, version='1.9.4')
RunnerVersion.objects.create(runner=self.runner, version='1.9.1')
RunnerVersion.objects.create(runner=self.runner, version='1.8')
RunnerVersion.objects.create(runner=self.runner, version='1.7')
RunnerVersion.objects.create(runner=self.runner, version='1.7.50')
versions = self.runner.versions
self.assertEqual(versions[0].version, '1.7')
self.assertEqual(versions[1].version, '1.7.50')
self.assertEqual(versions[2].version, '1.8')
self.assertEqual(versions[3].version, '1.9.1')
self.assertEqual(versions[4].version, '1.9.4')
self.assertEqual(versions[5].version, '1.9.14')
|
|
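The expected order ('1.7.50' before '1.8', '1.9.4' before '1.9.14') is numeric per dotted component, not lexicographic. A sketch of a key function that produces exactly this ordering — the Runner model presumably implements something equivalent internally; this is illustrative only:

def version_key(version):
    # '1.9.14' -> [1, 9, 14]; lists compare element-wise, shorter prefixes first.
    return [int(part) for part in version.split('.')]

versions = ['1.9.14', '1.9.4', '1.9.1', '1.8', '1.7', '1.7.50']
print(sorted(versions, key=version_key))
# ['1.7', '1.7.50', '1.8', '1.9.1', '1.9.4', '1.9.14']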
fc586c2571328f1591bb396b9e9e7dcb0730b610
|
src/core/migrations/0049_fix_review_accept_acknowledgement_url.py
|
src/core/migrations/0049_fix_review_accept_acknowledgement_url.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-17 10:48
from __future__ import unicode_literals
import re
from django.db import migrations
FROM_RE = re.compile("{{ ?do_review_url ?}}")
TO = "{{ review_url }}"
def replace_setting_urls(apps, schema_editor):
SettingValueTranslation = apps.get_model('core', 'SettingValueTranslation')
settings = SettingValueTranslation.objects.filter(master__setting__group__name="email")
for s in settings:
fix_url(s)
def fix_url(setting):
value = setting.value
new_value = FROM_RE.sub(TO, value)
setting.value = new_value
setting.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0048_add_article_details_to_review_assignment_email'),
]
operations = [
migrations.RunPython(replace_setting_urls, reverse_code=migrations.RunPython.noop),
]
|
Fix wrong URL name in review_accept_acknowledgement
|
Fix wrong URL name in review_accept_acknowledgement
|
Python
|
agpl-3.0
|
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
|
Fix wrong URL name in review_accept_acknowledgement
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-17 10:48
from __future__ import unicode_literals
import re
from django.db import migrations
FROM_RE = re.compile("{{ ?do_review_url ?}}")
TO = "{{ review_url }}"
def replace_setting_urls(apps, schema_editor):
SettingValueTranslation = apps.get_model('core', 'SettingValueTranslation')
settings = SettingValueTranslation.objects.filter(master__setting__group__name="email")
for s in settings:
fix_url(s)
def fix_url(setting):
value = setting.value
new_value = FROM_RE.sub(TO, value)
setting.value = new_value
setting.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0048_add_article_details_to_review_assignment_email'),
]
operations = [
migrations.RunPython(replace_setting_urls, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Fix wrong URL name in review_accept_acknowledgement<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-17 10:48
from __future__ import unicode_literals
import re
from django.db import migrations
FROM_RE = re.compile("{{ ?do_review_url ?}}")
TO = "{{ review_url }}"
def replace_setting_urls(apps, schema_editor):
SettingValueTranslation = apps.get_model('core', 'SettingValueTranslation')
settings = SettingValueTranslation.objects.filter(master__setting__group__name="email")
for s in settings:
fix_url(s)
def fix_url(setting):
value = setting.value
new_value = FROM_RE.sub(TO, value)
setting.value = new_value
setting.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0048_add_article_details_to_review_assignment_email'),
]
operations = [
migrations.RunPython(replace_setting_urls, reverse_code=migrations.RunPython.noop),
]
|
Fix wrong URL name in review_accept_acknowledgement# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-17 10:48
from __future__ import unicode_literals
import re
from django.db import migrations
FROM_RE = re.compile("{{ ?do_review_url ?}}")
TO = "{{ review_url }}"
def replace_setting_urls(apps, schema_editor):
SettingValueTranslation = apps.get_model('core', 'SettingValueTranslation')
settings = SettingValueTranslation.objects.filter(master__setting__group__name="email")
for s in settings:
fix_url(s)
def fix_url(setting):
value = setting.value
new_value = FROM_RE.sub(TO, value)
setting.value = new_value
setting.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0048_add_article_details_to_review_assignment_email'),
]
operations = [
migrations.RunPython(replace_setting_urls, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Fix wrong URL name in review_accept_acknowledgement<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-17 10:48
from __future__ import unicode_literals
import re
from django.db import migrations
FROM_RE = re.compile("{{ ?do_review_url ?}}")
TO = "{{ review_url }}"
def replace_setting_urls(apps, schema_editor):
SettingValueTranslation = apps.get_model('core', 'SettingValueTranslation')
settings = SettingValueTranslation.objects.filter(master__setting__group__name="email")
for s in settings:
fix_url(s)
def fix_url(setting):
value = setting.value
new_value = FROM_RE.sub(TO, value)
setting.value = new_value
setting.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0048_add_article_details_to_review_assignment_email'),
]
operations = [
migrations.RunPython(replace_setting_urls, reverse_code=migrations.RunPython.noop),
]
|
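FROM_RE tolerates an optional single space on either side of the placeholder, so both spellings get rewritten. A quick demonstration on made-up template text:

import re

FROM_RE = re.compile("{{ ?do_review_url ?}}")
TO = "{{ review_url }}"

sample = "Review here: {{ do_review_url }} or {{do_review_url}}."
print(FROM_RE.sub(TO, sample))
# Review here: {{ review_url }} or {{ review_url }}.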
|
8ef2fce934e38e135898b707a1491edf56d675ac
|
salt/client/raet/__init__.py
|
salt/client/raet/__init__.py
|
# -*- coding: utf-8 -*-
'''
The client libs to communicate with the salt master when running raet
'''
# Import python libs
import os
import time
import logging
# Import Salt libs
from salt.transport.road.raet import stacking
from salt.transport.road.raet import yarding
import salt.config
import salt.client
import salt.syspaths as syspaths
log = logging.getLogger(__name__)
class LocalClient(salt.client.LocalClient):
'''
The RAET LocalClient
'''
def __init__(self,
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
mopts=None):
salt.client.LocalClient.__init__(self, c_path, mopts)
def pub(self,
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs):
'''
Publish the command!
'''
payload_kwargs = self._prep_pub(
tgt,
fun,
                arg=arg,
                expr_form=expr_form,
                ret=ret,
                jid=jid,
                timeout=timeout,
**kwargs)
stack = stacking.StackUxd(lanename='com', dirpath=self.opts['sock_dir'])
router_yard = yarding.Yard(
name='router',
lanename='com',
yid=0,
dirpath=self.opts['sock_dir'])
stack.addRemoteYard(router_yard)
route = {'dst': (None, router_yard.name, 'local_cmd')}
msg = {'route': route, 'load': payload_kwargs}
stack.transmit(msg)
stack.serviceAll()
while True:
time.sleep(0.01)
stack.serviceAll()
for msg in stack.rxMsgs:
return msg
|
Add raet LocalClient - just starting
|
Add raet LocalClient - just starting
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add raet LocalClient - just starting
|
# -*- coding: utf-8 -*-
'''
The client libs to communicate with the salt master when running raet
'''
# Import python libs
import os
import time
import logging
# Import Salt libs
from salt.transport.road.raet import stacking
from salt.transport.road.raet import yarding
import salt.config
import salt.client
import salt.syspaths as syspaths
log = logging.getLogger(__name__)
class LocalClient(salt.client.LocalClient):
'''
The RAET LocalClient
'''
def __init__(self,
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
mopts=None):
salt.client.LocalClient.__init__(self, c_path, mopts)
def pub(self,
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs):
'''
Publish the command!
'''
payload_kwargs = self._prep_pub(
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs)
stack = stacking.StackUxd(lanename='com', dirpath=self.opts['sock_dir'])
router_yard = yarding.Yard(
name='router',
lanename='com',
yid=0,
dirpath=self.opts['sock_dir'])
stack.addRemoteYard(router_yard)
route = {'dst': (None, router_yard.name, 'local_cmd')}
msg = {'route': route, 'load': payload_kwargs}
stack.transmit(msg)
stack.serviceAll()
while True:
time.sleep(0.01)
stack.serviceAll()
for msg in stack.rxMsgs:
return msg
|
<commit_before><commit_msg>Add raet LocalClient - just starting<commit_after>
|
# -*- coding: utf-8 -*-
'''
The client libs to communicate with the salt master when running raet
'''
# Import python libs
import os
import time
import logging
# Import Salt libs
from salt.transport.road.raet import stacking
from salt.transport.road.raet import yarding
import salt.config
import salt.client
import salt.syspaths as syspaths
log = logging.getLogger(__name__)
class LocalClient(salt.client.LocalClient):
'''
The RAET LocalClient
'''
def __init__(self,
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
mopts=None):
salt.client.LocalClient.__init__(self, c_path, mopts)
def pub(self,
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs):
'''
Publish the command!
'''
payload_kwargs = self._prep_pub(
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs)
stack = stacking.StackUxd(lanename='com', dirpath=self.opts['sock_dir'])
router_yard = yarding.Yard(
name='router',
lanename='com',
yid=0,
dirpath=self.opts['sock_dir'])
stack.addRemoteYard(router_yard)
route = {'dst': (None, router_yard.name, 'local_cmd')}
msg = {'route': route, 'load': payload_kwargs}
stack.transmit(msg)
stack.serviceAll()
while True:
time.sleep(0.01)
stack.serviceAll()
for msg in stack.rxMsgs:
return msg
|
Add raet LocalClient - just starting# -*- coding: utf-8 -*-
'''
The client libs to communicate with the salt master when running raet
'''
# Import python libs
import os
import time
import logging
# Import Salt libs
from salt.transport.road.raet import stacking
from salt.transport.road.raet import yarding
import salt.config
import salt.client
import salt.syspaths as syspaths
log = logging.getLogger(__name__)
class LocalClient(salt.client.LocalClient):
'''
The RAET LocalClient
'''
def __init__(self,
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
mopts=None):
salt.client.LocalClient.__init__(self, c_path, mopts)
def pub(self,
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs):
'''
Publish the command!
'''
payload_kwargs = self._prep_pub(
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs)
stack = stacking.StackUxd(lanename='com', dirpath=self.opts['sock_dir'])
router_yard = yarding.Yard(
name='router',
lanename='com',
yid=0,
dirpath=self.opts['sock_dir'])
stack.addRemoteYard(router_yard)
route = {'dst': (None, router_yard.name, 'local_cmd')}
msg = {'route': route, 'load': payload_kwargs}
stack.transmit(msg)
stack.serviceAll()
while True:
time.sleep(0.01)
stack.serviceAll()
for msg in stack.rxMsgs:
return msg
|
<commit_before><commit_msg>Add raet LocalClient - just starting<commit_after># -*- coding: utf-8 -*-
'''
The client libs to communicate with the salt master when running raet
'''
# Import python libs
import os
import time
import logging
# Import Salt libs
from salt.transport.road.raet import stacking
from salt.transport.road.raet import yarding
import salt.config
import salt.client
import salt.syspaths as syspaths
log = logging.getLogger(__name__)
class LocalClient(salt.client.LocalClient):
'''
The RAET LocalClient
'''
def __init__(self,
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
mopts=None):
salt.client.LocalClient.__init__(self, c_path, mopts)
def pub(self,
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs):
'''
Publish the command!
'''
payload_kwargs = self._prep_pub(
tgt,
fun,
arg=(),
expr_form='glob',
ret='',
jid='',
timeout=5,
**kwargs)
stack = stacking.StackUxd(lanename='com', dirpath=self.opts['sock_dir'])
router_yard = yarding.Yard(
name='router',
lanename='com',
yid=0,
dirpath=self.opts['sock_dir'])
stack.addRemoteYard(router_yard)
route = {'dst': (None, router_yard.name, 'local_cmd')}
msg = {'route': route, 'load': payload_kwargs}
stack.transmit(msg)
stack.serviceAll()
while True:
time.sleep(0.01)
stack.serviceAll()
for msg in stack.rxMsgs:
return msg
|
|
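The receive loop in the client above spins forever when no reply arrives. A bounded variant of the same poll-and-drain pattern, as a stdlib-only sketch (independent of raet; `stack` stands in for any object exposing serviceAll() and rxMsgs):

import time

def wait_for_msg(stack, timeout=5.0, poll=0.01):
    """Poll a stack until a message arrives or the timeout elapses."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        time.sleep(poll)
        stack.serviceAll()          # pump I/O, as the loop above does
        for msg in stack.rxMsgs:
            return msg              # first received message wins
    return None                     # let the caller decide how to handle a timeout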
468d5cc051d01f4440d6f4e5f85bad771994f8de
|
project/velkoja/management/commands/check_nordea_overdue.py
|
project/velkoja/management/commands/check_nordea_overdue.py
|
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand, CommandError
from velkoja.nordeachecker import NordeaOverdueInvoicesHandler
class Command(BaseCommand):
help = 'Check overdue Nordea payments and send emails about them'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
handler = NordeaOverdueInvoicesHandler()
notified = handler.process_overdue(send=True)
if options['verbosity'] > 1:
for n, t in notified:
print("Notified %s about %s" % (n.email, t))
|
Add management command to run the checks
|
Add management command to run the checks
|
Python
|
mit
|
HelsinkiHacklab/asylum,rambo/asylum,rambo/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,hacklab-fi/asylum,rambo/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,rambo/asylum,HelsinkiHacklab/asylum
|
Add management command to run the checks
|
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand, CommandError
from velkoja.nordeachecker import NordeaOverdueInvoicesHandler
class Command(BaseCommand):
help = 'Check overdue Nordea payments and send emails about them'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
handler = NordeaOverdueInvoicesHandler()
notified = handler.process_overdue(send=True)
if options['verbosity'] > 1:
for n, t in notified:
print("Notified %s about %s" % (n.email, t))
|
<commit_before><commit_msg>Add management command to run the checks<commit_after>
|
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand, CommandError
from velkoja.nordeachecker import NordeaOverdueInvoicesHandler
class Command(BaseCommand):
help = 'Check overdue Nordea payments and send emails about them'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
handler = NordeaOverdueInvoicesHandler()
notified = handler.process_overdue(send=True)
if options['verbosity'] > 1:
for n, t in notified:
print("Notified %s about %s" % (n.email, t))
|
Add management command to run the checks# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand, CommandError
from velkoja.nordeachecker import NordeaOverdueInvoicesHandler
class Command(BaseCommand):
help = 'Check overdue Nordea payments and send emails about them'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
handler = NordeaOverdueInvoicesHandler()
notified = handler.process_overdue(send=True)
if options['verbosity'] > 1:
for n, t in notified:
print("Notified %s about %s" % (n.email, t))
|
<commit_before><commit_msg>Add management command to run the checks<commit_after># -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand, CommandError
from velkoja.nordeachecker import NordeaOverdueInvoicesHandler
class Command(BaseCommand):
help = 'Check overdue Nordea payments and send emails about them'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
handler = NordeaOverdueInvoicesHandler()
notified = handler.process_overdue(send=True)
if options['verbosity'] > 1:
for n, t in notified:
print("Notified %s about %s" % (n.email, t))
|
|
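A command like the one above can also be exercised from tests or a Django shell through call_command; a minimal sketch (assumes a configured Django project with the velkoja app installed):

from django.core.management import call_command

# verbosity > 1 triggers the per-notification print in handle()
call_command('check_nordea_overdue', verbosity=2)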
8f15649c42b858cef4df3cbd16fb3d207df3aaef
|
script/upload-test-images.py
|
script/upload-test-images.py
|
import girder_client
import os
import sys
def main():
# Use the API key to authenticate.
key = os.environ.get("GIRDER_API_KEY")
if key is None:
print >>sys.stderr, "Environment variable GIRDER_API_KEY is blank. Cannot upload images."
return 1
gc = girder_client.GirderClient(host="data.kitware.com", port=443, scheme="https")
gc.authenticate(apiKey=key)
# Retrieve the target folder, which should be at ~/Public/Travis\ Candela
user = gc.get("user/me")
if user is None:
print >>sys.stderr, "No user logged in; API key may be bad."
return 1
travis_build_number = os.environ.get("TRAVIS_BUILD_NUMBER")
travis_job_number = os.environ.get("TRAVIS_JOB_NUMBER")
folder = gc.load_or_create_folder("Public", user["_id"], "user")
folder = gc.load_or_create_folder("Travis Candela", folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_build_number, folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_job_number, folder["_id"], "folder")
# Upload the files specified on the command line, creating (or loading) a
# folder for each.
for imageFile in sys.argv[1:]:
(dirname, filename) = os.path.split(imageFile)
compName = dirname.split(os.path.sep)[-2]
compFolder = gc.load_or_create_folder(compName, folder["_id"], "folder")
gc._upload_as_item(filename, compFolder["_id"], imageFile)
if __name__ == "__main__":
sys.exit(main())
|
Add script to upload images to Girder
|
Add script to upload images to Girder
|
Python
|
apache-2.0
|
Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela
|
Add script to upload images to Girder
|
import girder_client
import os
import sys
def main():
# Use the API key to authenticate.
key = os.environ.get("GIRDER_API_KEY")
if key is None:
print >>sys.stderr, "Environment variable GIRDER_API_KEY is blank. Cannot upload images."
return 1
gc = girder_client.GirderClient(host="data.kitware.com", port=443, scheme="https")
gc.authenticate(apiKey=key)
# Retrieve the target folder, which should be at ~/Public/Travis\ Candela
user = gc.get("user/me")
if user is None:
print >>sys.stderr, "No user logged in; API key may be bad."
return 1
travis_build_number = os.environ.get("TRAVIS_BUILD_NUMBER")
travis_job_number = os.environ.get("TRAVIS_JOB_NUMBER")
folder = gc.load_or_create_folder("Public", user["_id"], "user")
folder = gc.load_or_create_folder("Travis Candela", folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_build_number, folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_job_number, folder["_id"], "folder")
# Upload the files specified on the command line, creating (or loading) a
# folder for each.
for imageFile in sys.argv[1:]:
(dirname, filename) = os.path.split(imageFile)
compName = dirname.split(os.path.sep)[-2]
compFolder = gc.load_or_create_folder(compName, folder["_id"], "folder")
gc._upload_as_item(filename, compFolder["_id"], imageFile)
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add script to upload images to Girder<commit_after>
|
import girder_client
import os
import sys
def main():
# Use the API key to authenticate.
key = os.environ.get("GIRDER_API_KEY")
if key is None:
print >>sys.stderr, "Environment variable GIRDER_API_KEY is blank. Cannot upload images."
return 1
gc = girder_client.GirderClient(host="data.kitware.com", port=443, scheme="https")
gc.authenticate(apiKey=key)
# Retrieve the target folder, which should be at ~/Public/Travis\ Candela
user = gc.get("user/me")
if user is None:
print >>sys.stderr, "No user logged in; API key may be bad."
return 1
travis_build_number = os.environ.get("TRAVIS_BUILD_NUMBER")
travis_job_number = os.environ.get("TRAVIS_JOB_NUMBER")
folder = gc.load_or_create_folder("Public", user["_id"], "user")
folder = gc.load_or_create_folder("Travis Candela", folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_build_number, folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_job_number, folder["_id"], "folder")
# Upload the files specified on the command line, creating (or loading) a
# folder for each.
for imageFile in sys.argv[1:]:
(dirname, filename) = os.path.split(imageFile)
compName = dirname.split(os.path.sep)[-2]
compFolder = gc.load_or_create_folder(compName, folder["_id"], "folder")
gc._upload_as_item(filename, compFolder["_id"], imageFile)
if __name__ == "__main__":
sys.exit(main())
|
Add script to upload images to Girderimport girder_client
import os
import sys
def main():
# Use the API key to authenticate.
key = os.environ.get("GIRDER_API_KEY")
if key is None:
print >>sys.stderr, "Environment variable GIRDER_API_KEY is blank. Cannot upload images."
return 1
gc = girder_client.GirderClient(host="data.kitware.com", port=443, scheme="https")
gc.authenticate(apiKey=key)
# Retrieve the target folder, which should be at ~/Public/Travis\ Candela
user = gc.get("user/me")
if user is None:
print >>sys.stderr, "No user logged in; API key may be bad."
return 1
travis_build_number = os.environ.get("TRAVIS_BUILD_NUMBER")
travis_job_number = os.environ.get("TRAVIS_JOB_NUMBER")
folder = gc.load_or_create_folder("Public", user["_id"], "user")
folder = gc.load_or_create_folder("Travis Candela", folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_build_number, folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_job_number, folder["_id"], "folder")
# Upload the files specified on the command line, creating (or loading) a
# folder for each.
for imageFile in sys.argv[1:]:
(dirname, filename) = os.path.split(imageFile)
compName = dirname.split(os.path.sep)[-2]
compFolder = gc.load_or_create_folder(compName, folder["_id"], "folder")
gc._upload_as_item(filename, compFolder["_id"], imageFile)
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add script to upload images to Girder<commit_after>import girder_client
import os
import sys
def main():
# Use the API key to authenticate.
key = os.environ.get("GIRDER_API_KEY")
if key is None:
print >>sys.stderr, "Environment variable GIRDER_API_KEY is blank. Cannot upload images."
return 1
gc = girder_client.GirderClient(host="data.kitware.com", port=443, scheme="https")
gc.authenticate(apiKey=key)
# Retrieve the target folder, which should be at ~/Public/Travis\ Candela
user = gc.get("user/me")
if user is None:
print >>sys.stderr, "No user logged in; API key may be bad."
return 1
travis_build_number = os.environ.get("TRAVIS_BUILD_NUMBER")
travis_job_number = os.environ.get("TRAVIS_JOB_NUMBER")
folder = gc.load_or_create_folder("Public", user["_id"], "user")
folder = gc.load_or_create_folder("Travis Candela", folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_build_number, folder["_id"], "folder")
folder = gc.load_or_create_folder(travis_job_number, folder["_id"], "folder")
# Upload the files specified on the command line, creating (or loading) a
# folder for each.
for imageFile in sys.argv[1:]:
(dirname, filename) = os.path.split(imageFile)
compName = dirname.split(os.path.sep)[-2]
compFolder = gc.load_or_create_folder(compName, folder["_id"], "folder")
gc._upload_as_item(filename, compFolder["_id"], imageFile)
if __name__ == "__main__":
sys.exit(main())
|
|
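The script takes its credentials and build metadata from the environment; a sketch of driving it from another Python process (stdlib only; the variable names come from the script above, the key and image path are placeholders):

import os
import subprocess

env = dict(os.environ,
           GIRDER_API_KEY="<your api key>",
           TRAVIS_BUILD_NUMBER="123",
           TRAVIS_JOB_NUMBER="123.1")
subprocess.check_call(
    ["python", "upload-test-images.py", "comp/foo/shot.png"], env=env)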
91b3e6f4a53330b27941497e10542b99ed27b94c
|
python/partition-dataset.py
|
python/partition-dataset.py
|
import pandas as pd
import operator
df = pd.read_csv('../datasets/movielens-synthesized/ratings-synthesized-1m.csv')
counts = {}
user_ratings = df.groupby('user')
for k, df in user_ratings:
# do something with group
counts[k] = df['user'].count()
print "num users", len(counts)
sorted_x = sorted(counts.iteritems(), key=operator.itemgetter(0), reverse=True)
f = open('myfile','w')
for (user,count) in sorted_x[:400]:
for u, rating in user_ratings.set_index('user',inplace=True)[user]:
f.write('') # python will convert \n to os.linesep
f.close()
|
Add script to partition movielens dataset into a more dense selection.
|
WIP: Add script to partition movielens dataset into a more dense selection.
|
Python
|
mit
|
ntnu-smartmedia/goldfish,monsendag/goldfish,ntnu-smartmedia/goldfish,monsendag/goldfish,ntnu-smartmedia/goldfish,monsendag/goldfish
|
WIP: Add script to partition movielens dataset into a more dense selection.
|
import pandas as pd
import operator
df = pd.read_csv('../datasets/movielens-synthesized/ratings-synthesized-1m.csv')
counts = {}
user_ratings = df.groupby('user')
for k, df in user_ratings:
# do something with group
counts[k] = df['user'].count()
print "num users", len(counts)
sorted_x = sorted(counts.iteritems(), key=operator.itemgetter(0), reverse=True)
f = open('myfile','w')
for (user,count) in sorted_x[:400]:
for u, rating in user_ratings.set_index('user',inplace=True)[user]:
f.write('') # python will convert \n to os.linesep
f.close()
|
<commit_before><commit_msg>WIP: Add script to partition movielens dataset into a more dense selection.<commit_after>
|
import pandas as pd
import operator
df = pd.read_csv('../datasets/movielens-synthesized/ratings-synthesized-1m.csv')
counts = {}
user_ratings = df.groupby('user')
for k, df in user_ratings:
# do something with group
counts[k] = df['user'].count()
print "num users", len(counts)
sorted_x = sorted(counts.iteritems(), key=operator.itemgetter(0), reverse=True)
f = open('myfile','w')
for (user,count) in sorted_x[:400]:
for u, rating in user_ratings.set_index('user',inplace=True)[user]:
f.write('') # python will convert \n to os.linesep
f.close()
|
WIP: Add script to partition movielens dataset into a more dense selection.
import pandas as pd
import operator
df = pd.read_csv('../datasets/movielens-synthesized/ratings-synthesized-1m.csv')
counts = {}
user_ratings = df.groupby('user')
for k, df in user_ratings:
# do something with group
counts[k] = df['user'].count()
print "num users", len(counts)
sorted_x = sorted(counts.iteritems(), key=operator.itemgetter(0), reverse=True)
f = open('myfile','w')
for (user,count) in sorted_x[:400]:
for u, rating in user_ratings.set_index('user',inplace=True)[user]:
f.write('') # python will convert \n to os.linesep
f.close()
|
<commit_before><commit_msg>WIP: Add script to partition movielens dataset into a more dense selection.<commit_after>
import pandas as pd
import operator
df = pd.read_csv('../datasets/movielens-synthesized/ratings-synthesized-1m.csv')
counts = {}
user_ratings = df.groupby('user')
for k, df in user_ratings:
# do something with group
counts[k] = df['user'].count()
print "num users", len(counts)
sorted_x = sorted(counts.iteritems(), key=operator.itemgetter(0), reverse=True)
f = open('myfile','w')
for (user,count) in sorted_x[:400]:
for u, rating in user_ratings.set_index('user',inplace=True)[user]:
f.write('') # python will convert \n to os.linesep
f.close()
|
|
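The hand-rolled counting loop above maps onto built-in pandas operations; a sketch of the same "keep only the most active users" selection (assumes the same user column; the file names are placeholders):

import pandas as pd

df = pd.read_csv('ratings.csv')
counts = df['user'].value_counts()          # ratings per user
top_users = counts.nlargest(400).index      # the 400 most active users
dense = df[df['user'].isin(top_users)]      # keep only their ratings
dense.to_csv('ratings-dense.csv', index=False)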
c7dc0a027d587f3c194c29f85f4a75db9bc2a44c
|
tests/rules_tests/grammarManipulation_tests/GetTest.py
|
tests/rules_tests/grammarManipulation_tests/GetTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule as _R, Grammar
from rules_tests.grammar import *
class GetTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_getAsArray(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp2, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, Tmp2, Tmp3])
def test_getAsArrayWithNone(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, None, Tmp3])
if __name__ == '__main__':
main()
|
Add tests of rule's get methods
|
Add tests of rule's get methods
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add tests of rule's get methods
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule as _R, Grammar
from rules_tests.grammar import *
class GetTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_getAsArray(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp2, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, Tmp2, Tmp3])
def test_getAsArrayWithNone(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, None, Tmp3])
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tests of rule's get methods<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule as _R, Grammar
from rules_tests.grammar import *
class GetTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_getAsArray(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp2, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, Tmp2, Tmp3])
def test_getAsArrayWithNone(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, None, Tmp3])
if __name__ == '__main__':
main()
|
Add tests of rule's get methods#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule as _R, Grammar
from rules_tests.grammar import *
class GetTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_getAsArray(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp2, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, Tmp2, Tmp3])
def test_getAsArrayWithNone(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, None, Tmp3])
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tests of rule's get methods<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule as _R, Grammar
from rules_tests.grammar import *
class GetTest(TestCase):
def __init__(self, *args):
super().__init__(*args)
self.g = Grammar()
def setUp(self):
g = Grammar()
g.add_term([0, 1, 2, 'a', 'b', 'c'])
g.add_nonterm([NFirst, NSecond, NThird, NFourth])
self.g = g
def test_getAsArray(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp2, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, Tmp2, Tmp3])
def test_getAsArrayWithNone(self):
class Tmp1(_R):
rule = ([NFirst], ['a', 0])
class Tmp2(_R):
rule = ([NSecond], ['a', 0, NFourth])
class Tmp3(_R):
rule = ([NThird], [0])
self.assertEqual(self.g.rules_count(), 0)
self.g.add_rule([Tmp1, Tmp3])
self.assertEqual(self.g.get_rule([Tmp1, Tmp2, Tmp3]), [Tmp1, None, Tmp3])
if __name__ == '__main__':
main()
|
|
ad7c3675f4e6f6b1e083f1308307498bff285e89
|
director/projects/migrations/0004_auto_20180723_1708.py
|
director/projects/migrations/0004_auto_20180723_1708.py
|
# Generated by Django 2.0.7 on 2018-07-23 17:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0003_auto_20180723_0245'),
]
operations = [
migrations.AlterUniqueTogether(
name='filesprojectfile',
unique_together=set(),
),
]
|
Add migration for altering unique_together
|
Add migration for altering unique_together
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
Add migration for altering unique_together
|
# Generated by Django 2.0.7 on 2018-07-23 17:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0003_auto_20180723_0245'),
]
operations = [
migrations.AlterUniqueTogether(
name='filesprojectfile',
unique_together=set(),
),
]
|
<commit_before><commit_msg>Add migration for altering unique_together<commit_after>
|
# Generated by Django 2.0.7 on 2018-07-23 17:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0003_auto_20180723_0245'),
]
operations = [
migrations.AlterUniqueTogether(
name='filesprojectfile',
unique_together=set(),
),
]
|
Add migration for altering unique_together# Generated by Django 2.0.7 on 2018-07-23 17:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0003_auto_20180723_0245'),
]
operations = [
migrations.AlterUniqueTogether(
name='filesprojectfile',
unique_together=set(),
),
]
|
<commit_before><commit_msg>Add migration for altering unique_together<commit_after># Generated by Django 2.0.7 on 2018-07-23 17:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0003_auto_20180723_0245'),
]
operations = [
migrations.AlterUniqueTogether(
name='filesprojectfile',
unique_together=set(),
),
]
|
|
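AlterUniqueTogether with an empty set drops a previously declared constraint. The model-side change that makes makemigrations emit such an operation looks roughly like this (field names are illustrative, not taken from the project):

from django.db import models

class FilesProjectFile(models.Model):
    project = models.ForeignKey('FilesProject', on_delete=models.CASCADE)
    name = models.TextField()

    class Meta:
        # Deleting a previous `unique_together = (('project', 'name'),)`
        # yields AlterUniqueTogether(name='filesprojectfile',
        # unique_together=set()) on the next makemigrations run.
        unique_together = ()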
b9ce512cb88cb5fddb4561d2edaaf20c715c5d94
|
tests/rules_tests/clearAfterNonTermRemove/WithEpsilonTest.py
|
tests/rules_tests/clearAfterNonTermRemove/WithEpsilonTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 17:05
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([A], [B, C]),
([A], [EPS]),
([B], [0, 1])]
class WithEpsilonTest(TestCase):
def __init__(self, methodName='runTest'):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B, C],
rules=[Rules])
def test_removeB(self):
self.assertEqual(self.g.rules_count(), 3)
self.g.remove_rule(B)
self.assertEqual(self.g.rules_count(), 1)
if __name__ == '__main__':
main()
|
Add test of rule with epsilon
|
Add test of rule with epsilon
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add test of rule with epsilon
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 17:05
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([A], [B, C]),
([A], [EPS]),
([B], [0, 1])]
class WithEpsilonTest(TestCase):
def __init__(self, methodName='runTest'):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B, C],
rules=[Rules])
def test_removeB(self):
self.assertEqual(self.g.rules_count(), 3)
self.g.remove_rule(B)
self.assertEqual(self.g.rules_count(), 1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add test of rule with epsilon<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 17:05
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([A], [B, C]),
([A], [EPS]),
([B], [0, 1])]
class WithEpsilonTest(TestCase):
def __init__(self, methodName='runTest'):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B, C],
rules=[Rules])
def test_removeB(self):
self.assertEqual(self.g.rules_count(), 3)
self.g.remove_rule(B)
self.assertEqual(self.g.rules_count(), 1)
if __name__ == '__main__':
main()
|
Add test of rule with epsilon#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 17:05
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([A], [B, C]),
([A], [EPS]),
([B], [0, 1])]
class WithEpsilonTest(TestCase):
def __init__(self, methodName='runTest'):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B, C],
rules=[Rules])
def test_removeB(self):
self.assertEqual(self.g.rules_count(), 3)
self.g.remove_rule(B)
self.assertEqual(self.g.rules_count(), 1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add test of rule with epsilon<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 17:05
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([A], [B, C]),
([A], [EPS]),
([B], [0, 1])]
class WithEpsilonTest(TestCase):
def __init__(self, methodName='runTest'):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B, C],
rules=[Rules])
def test_removeB(self):
self.assertEqual(self.g.rules_count(), 3)
self.g.remove_rule(B)
self.assertEqual(self.g.rules_count(), 1)
if __name__ == '__main__':
main()
|
|
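A short note on what the test above pins down: remove_rule accepts a nonterminal and discards every rule whose definition mentions it, so removing B takes out both ([A], [B, C]) and ([B], [0, 1]), while the epsilon rule ([A], [EPS]) survives — hence the count of 1.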
3883775d12769a25a0bea5017741c53af14e20ab
|
sites/sandbox/apps/offers.py
|
sites/sandbox/apps/offers.py
|
from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
|
from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(offer, basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
|
Correct consume_items call for sandbox offer
|
Correct consume_items call for sandbox offer
|
Python
|
bsd-3-clause
|
faratro/django-oscar,jmt4/django-oscar,elliotthill/django-oscar,lijoantony/django-oscar,QLGu/django-oscar,dongguangming/django-oscar,pasqualguerrero/django-oscar,nickpack/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,pdonadeo/django-oscar,spartonia/django-oscar,binarydud/django-oscar,binarydud/django-oscar,faratro/django-oscar,michaelkuty/django-oscar,DrOctogon/unwash_ecom,Idematica/django-oscar,bschuon/django-oscar,sasha0/django-oscar,thechampanurag/django-oscar,okfish/django-oscar,binarydud/django-oscar,elliotthill/django-oscar,jinnykoo/christmas,john-parton/django-oscar,Jannes123/django-oscar,pasqualguerrero/django-oscar,WillisXChen/django-oscar,bnprk/django-oscar,bnprk/django-oscar,jinnykoo/wuyisj,Bogh/django-oscar,marcoantoniooliveira/labweb,monikasulik/django-oscar,solarissmoke/django-oscar,lijoantony/django-oscar,manevant/django-oscar,jlmadurga/django-oscar,nfletton/django-oscar,machtfit/django-oscar,kapari/django-oscar,sasha0/django-oscar,nickpack/django-oscar,django-oscar/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,jlmadurga/django-oscar,solarissmoke/django-oscar,jinnykoo/wuyisj.com,spartonia/django-oscar,pdonadeo/django-oscar,Bogh/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,kapari/django-oscar,amirrpp/django-oscar,DrOctogon/unwash_ecom,john-parton/django-oscar,QLGu/django-oscar,sasha0/django-oscar,django-oscar/django-oscar,mexeniz/django-oscar,kapt/django-oscar,jlmadurga/django-oscar,nickpack/django-oscar,MatthewWilkes/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj,vovanbo/django-oscar,rocopartners/django-oscar,jinnykoo/wuyisj,mexeniz/django-oscar,anentropic/django-oscar,spartonia/django-oscar,kapt/django-oscar,okfish/django-oscar,ademuk/django-oscar,pasqualguerrero/django-oscar,thechampanurag/django-oscar,manevant/django-oscar,sonofatailor/django-oscar,monikasulik/django-oscar,nfletton/django-oscar,lijoantony/django-oscar,bschuon/django-oscar,nfletton/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,ademuk/django-oscar,itbabu/django-oscar,jmt4/django-oscar,ahmetdaglarbas/e-commerce,WadeYuChen/django-oscar,amirrpp/django-oscar,MatthewWilkes/django-oscar,adamend/django-oscar,ademuk/django-oscar,vovanbo/django-oscar,Idematica/django-oscar,WillisXChen/django-oscar,itbabu/django-oscar,eddiep1101/django-oscar,ahmetdaglarbas/e-commerce,rocopartners/django-oscar,ka7eh/django-oscar,anentropic/django-oscar,elliotthill/django-oscar,adamend/django-oscar,josesanch/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,eddiep1101/django-oscar,kapari/django-oscar,ka7eh/django-oscar,thechampanurag/django-oscar,amirrpp/django-oscar,bnprk/django-oscar,django-oscar/django-oscar,saadatqadri/django-oscar,kapt/django-oscar,okfish/django-oscar,john-parton/django-oscar,machtfit/django-oscar,django-oscar/django-oscar,bschuon/django-oscar,okfish/django-oscar,bschuon/django-oscar,solarissmoke/django-oscar,ademuk/django-oscar,adamend/django-oscar,Bogh/django-oscar,WadeYuChen/django-oscar,josesanch/django-oscar,saadatqadri/django-oscar,WadeYuChen/django-oscar,eddiep1101/django-oscar,MatthewWilkes/django-oscar,jinnykoo/christmas,john-parton/django-oscar,marcoantoniooliveira/labweb,spartonia/django-oscar,faratro/django-oscar,Idematica/django-oscar,taedori81/django-oscar,bnprk/django-oscar,jlmadurga/django-oscar,ahmetdaglarbas/e-commerce,rocopartners/django-oscar,pasqualguerrero/django-oscar,amirrpp/django-oscar,ahmetdaglarbas/e-commerce,michaelkuty/django-oscar,binarydud/django-oscar,WadeYuChen/django-oscar,QLGu/d
jango-oscar,jmt4/django-oscar,anentropic/django-oscar,thechampanurag/django-oscar,josesanch/django-oscar,ka7eh/django-oscar,faratro/django-oscar,taedori81/django-oscar,taedori81/django-oscar,taedori81/django-oscar,rocopartners/django-oscar,Jannes123/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,michaelkuty/django-oscar,vovanbo/django-oscar,jmt4/django-oscar,sasha0/django-oscar,pdonadeo/django-oscar,jinnykoo/wuyisj.com,QLGu/django-oscar,dongguangming/django-oscar,marcoantoniooliveira/labweb,jinnykoo/wuyisj,kapari/django-oscar,manevant/django-oscar,ka7eh/django-oscar,Jannes123/django-oscar,vovanbo/django-oscar,marcoantoniooliveira/labweb,nickpack/django-oscar,dongguangming/django-oscar,Jannes123/django-oscar,monikasulik/django-oscar,jinnykoo/wuyisj.com,DrOctogon/unwash_ecom,WillisXChen/django-oscar,machtfit/django-oscar,adamend/django-oscar,dongguangming/django-oscar,itbabu/django-oscar,manevant/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,saadatqadri/django-oscar,sonofatailor/django-oscar,michaelkuty/django-oscar,MatthewWilkes/django-oscar,monikasulik/django-oscar,pdonadeo/django-oscar,jinnykoo/christmas
|
from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
Correct consume_items call for sandbox offer
|
from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(offer, basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
|
<commit_before>from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
<commit_msg>Correct consume_items call for sandbox offer<commit_after>
|
from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(offer, basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
|
from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
Correct consume_items call for sandbox offerfrom oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(offer, basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
|
<commit_before>from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
<commit_msg>Correct consume_items call for sandbox offer<commit_after>from oscar.apps.offer import models
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(offer, basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Your name has been changed to Barry!"
@property
def description(self):
return "Changes owners name"
|
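The fix threads the active offer through to the condition, so any custom benefit written against this Oscar version has to call the three-argument form. A sketch mirroring the corrected pattern (the benefit itself is hypothetical):

from oscar.apps.offer import models

class FreeShippingNote(models.Benefit):
    class Meta:
        proxy = True

    def apply(self, basket, condition, offer=None):
        # offer comes first, as in the corrected call above
        condition.consume_items(offer, basket, ())
        return models.PostOrderAction("Shipping will be free!")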
3d3ec162988ae93a3abd95ae32d855d6f3169fe5
|
src/collectors/UPSCollector/UPSCollector.py
|
src/collectors/UPSCollector/UPSCollector.py
|
from diamond import *
import diamond.collector
import subprocess
class UPSCollector(diamond.collector.Collector):
"""
This class collects data from NUT, a UPS interface for linux.
Requires: nut/upsc to be installed, configured and running.
"""
def get_default_config(self):
"""
Returns default collector settings.
"""
return {
'enabled': 'True',
'path': 'ups',
'ups_name': 'cyberpower'
}
def collect(self):
p = subprocess.Popen(['/bin/upsc', self.config['ups_name']], stdout=subprocess.PIPE)
for ln in p.communicate()[0].splitlines():
datapoint = ln.split(": ")
try:
val = float(datapoint[1])
except:
continue
if len(datapoint[0].split(".")) == 2:
# If the metric name is the same as the subfolder
# double it so it's visible.
name = ".".join([datapoint[0], datapoint[0].split(".")[1]])
else:
name = datapoint[0]
self.publish(name, val)
|
Add a collector to collect from 'NUT', the UPS daemon for Linux.
|
Add a collector to collect from 'NUT', the UPS daemon for Linux.
|
Python
|
mit
|
EzyInsights/Diamond,stuartbfox/Diamond,jumping/Diamond,TAKEALOT/Diamond,jumping/Diamond,tuenti/Diamond,Ssawa/Diamond,krbaker/Diamond,acquia/Diamond,datafiniti/Diamond,stuartbfox/Diamond,TAKEALOT/Diamond,EzyInsights/Diamond,zoidbergwill/Diamond,Basis/Diamond,hvnsweeting/Diamond,works-mobile/Diamond,dcsquared13/Diamond,codepython/Diamond,szibis/Diamond,Netuitive/netuitive-diamond,TinLe/Diamond,dcsquared13/Diamond,Netuitive/netuitive-diamond,timchenxiaoyu/Diamond,acquia/Diamond,CYBERBUGJR/Diamond,timchenxiaoyu/Diamond,jriguera/Diamond,Netuitive/Diamond,MichaelDoyle/Diamond,MichaelDoyle/Diamond,jumping/Diamond,eMerzh/Diamond-1,russss/Diamond,tellapart/Diamond,jaingaurav/Diamond,sebbrandt87/Diamond,socialwareinc/Diamond,timchenxiaoyu/Diamond,signalfx/Diamond,socialwareinc/Diamond,Nihn/Diamond-1,cannium/Diamond,szibis/Diamond,jriguera/Diamond,krbaker/Diamond,Slach/Diamond,jaingaurav/Diamond,Ssawa/Diamond,thardie/Diamond,datafiniti/Diamond,hamelg/Diamond,ceph/Diamond,python-diamond/Diamond,Clever/Diamond,ramjothikumar/Diamond,Clever/Diamond,sebbrandt87/Diamond,mfriedenhagen/Diamond,Basis/Diamond,tellapart/Diamond,mzupan/Diamond,TAKEALOT/Diamond,saucelabs/Diamond,rtoma/Diamond,stuartbfox/Diamond,tuenti/Diamond,rtoma/Diamond,anandbhoraskar/Diamond,eMerzh/Diamond-1,disqus/Diamond,dcsquared13/Diamond,krbaker/Diamond,socialwareinc/Diamond,janisz/Diamond-1,Ensighten/Diamond,joel-airspring/Diamond,bmhatfield/Diamond,actmd/Diamond,EzyInsights/Diamond,saucelabs/Diamond,Slach/Diamond,actmd/Diamond,TinLe/Diamond,bmhatfield/Diamond,Nihn/Diamond-1,janisz/Diamond-1,Netuitive/netuitive-diamond,metamx/Diamond,CYBERBUGJR/Diamond,Ssawa/Diamond,datafiniti/Diamond,Ormod/Diamond,eMerzh/Diamond-1,h00dy/Diamond,python-diamond/Diamond,ceph/Diamond,saucelabs/Diamond,russss/Diamond,Netuitive/Diamond,gg7/diamond,tuenti/Diamond,actmd/Diamond,Precis/Diamond,russss/Diamond,gg7/diamond,ceph/Diamond,gg7/diamond,Nihn/Diamond-1,works-mobile/Diamond,bmhatfield/Diamond,mfriedenhagen/Diamond,actmd/Diamond,mfriedenhagen/Diamond,MichaelDoyle/Diamond,mzupan/Diamond,tusharmakkar08/Diamond,Netuitive/netuitive-diamond,Ormod/Diamond,cannium/Diamond,Clever/Diamond,metamx/Diamond,codepython/Diamond,MediaMath/Diamond,Precis/Diamond,Precis/Diamond,thardie/Diamond,disqus/Diamond,sebbrandt87/Diamond,szibis/Diamond,janisz/Diamond-1,joel-airspring/Diamond,ramjothikumar/Diamond,TinLe/Diamond,tuenti/Diamond,stuartbfox/Diamond,Precis/Diamond,Basis/Diamond,h00dy/Diamond,CYBERBUGJR/Diamond,mfriedenhagen/Diamond,hvnsweeting/Diamond,Nihn/Diamond-1,joel-airspring/Diamond,Netuitive/Diamond,jaingaurav/Diamond,skbkontur/Diamond,Basis/Diamond,hamelg/Diamond,EzyInsights/Diamond,zoidbergwill/Diamond,TinLe/Diamond,rtoma/Diamond,tusharmakkar08/Diamond,skbkontur/Diamond,anandbhoraskar/Diamond,Ormod/Diamond,rtoma/Diamond,hamelg/Diamond,python-diamond/Diamond,eMerzh/Diamond-1,tusharmakkar08/Diamond,jumping/Diamond,zoidbergwill/Diamond,saucelabs/Diamond,skbkontur/Diamond,Ssawa/Diamond,jriguera/Diamond,cannium/Diamond,szibis/Diamond,tellapart/Diamond,krbaker/Diamond,bmhatfield/Diamond,Ormod/Diamond,cannium/Diamond,joel-airspring/Diamond,hvnsweeting/Diamond,jriguera/Diamond,Netuitive/Diamond,Clever/Diamond,hamelg/Diamond,thardie/Diamond,works-mobile/Diamond,h00dy/Diamond,anandbhoraskar/Diamond,datafiniti/Diamond,mzupan/Diamond,sebbrandt87/Diamond,dcsquared13/Diamond,signalfx/Diamond,Ensighten/Diamond,Ensighten/Diamond,janisz/Diamond-1,russss/Diamond,TAKEALOT/Diamond,hvnsweeting/Diamond,signalfx/Diamond,disqus/Diamond,Slach/Diamond,acquia/Diamond,socialwareinc/Diamond,ja
ingaurav/Diamond,CYBERBUGJR/Diamond,Ensighten/Diamond,MichaelDoyle/Diamond,acquia/Diamond,MediaMath/Diamond,timchenxiaoyu/Diamond,codepython/Diamond,MediaMath/Diamond,ramjothikumar/Diamond,zoidbergwill/Diamond,ramjothikumar/Diamond,tusharmakkar08/Diamond,tellapart/Diamond,signalfx/Diamond,thardie/Diamond,codepython/Diamond,anandbhoraskar/Diamond,skbkontur/Diamond,MediaMath/Diamond,gg7/diamond,h00dy/Diamond,mzupan/Diamond,metamx/Diamond,works-mobile/Diamond,Slach/Diamond,ceph/Diamond
|
Add a collector to collect from 'NUT', the UPS daemon for Linux.
|
from diamond import *
import diamond.collector
import subprocess
class UPSCollector(diamond.collector.Collector):
"""
This class collects data from NUT, a UPS interface for linux.
Requires: nut/upsc to be installed, configured and running.
"""
def get_default_config(self):
"""
Returns default collector settings.
"""
return {
'enabled': 'True',
'path': 'ups',
'ups_name': 'cyberpower'
}
def collect(self):
p = subprocess.Popen(['/bin/upsc', self.config['ups_name']], stdout=subprocess.PIPE)
for ln in p.communicate()[0].splitlines():
datapoint = ln.split(": ")
try:
val = float(datapoint[1])
except:
continue
if len(datapoint[0].split(".")) == 2:
# If the metric name is the same as the subfolder
# double it so it's visible.
name = ".".join([datapoint[0], datapoint[0].split(".")[1]])
else:
name = datapoint[0]
self.publish(name, val)
|
<commit_before><commit_msg>Add a collector to collect from 'NUT', the UPS daemon for Linux.<commit_after>
|
from diamond import *
import diamond.collector
import subprocess
class UPSCollector(diamond.collector.Collector):
"""
This class collects data from NUT, a UPS interface for linux.
Requires: nut/upsc to be installed, configured and running.
"""
def get_default_config(self):
"""
Returns default collector settings.
"""
return {
'enabled': 'True',
'path': 'ups',
'ups_name': 'cyberpower'
}
def collect(self):
p = subprocess.Popen(['/bin/upsc', self.config['ups_name']], stdout=subprocess.PIPE)
for ln in p.communicate()[0].splitlines():
datapoint = ln.split(": ")
try:
val = float(datapoint[1])
except:
continue
if len(datapoint[0].split(".")) == 2:
# If the metric name is the same as the subfolder
# double it so it's visible.
name = ".".join([datapoint[0], datapoint[0].split(".")[1]])
else:
name = datapoint[0]
self.publish(name, val)
|
Add a collector to collect from 'NUT', the UPS daemon for Linux.from diamond import *
import diamond.collector
import subprocess
class UPSCollector(diamond.collector.Collector):
"""
This class collects data from NUT, a UPS interface for linux.
Requires: nut/upsc to be installed, configured and running.
"""
def get_default_config(self):
"""
Returns default collector settings.
"""
return {
'enabled': 'True',
'path': 'ups',
'ups_name': 'cyberpower'
}
def collect(self):
p = subprocess.Popen(['/bin/upsc', self.config['ups_name']], stdout=subprocess.PIPE)
for ln in p.communicate()[0].splitlines():
datapoint = ln.split(": ")
try:
val = float(datapoint[1])
except:
continue
if len(datapoint[0].split(".")) == 2:
# If the metric name is the same as the subfolder
# double it so it's visible.
name = ".".join([datapoint[0], datapoint[0].split(".")[1]])
else:
name = datapoint[0]
self.publish(name, val)
|
<commit_before><commit_msg>Add a collector to collect from 'NUT', the UPS daemon for Linux.<commit_after>from diamond import *
import diamond.collector
import subprocess
class UPSCollector(diamond.collector.Collector):
"""
This class collects data from NUT, a UPS interface for linux.
Requires: nut/upsc to be installed, configured and running.
"""
def get_default_config(self):
"""
Returns default collector settings.
"""
return {
'enabled': 'True',
'path': 'ups',
'ups_name': 'cyberpower'
}
def collect(self):
p = subprocess.Popen(['/bin/upsc', self.config['ups_name']], stdout=subprocess.PIPE)
for ln in p.communicate()[0].splitlines():
datapoint = ln.split(": ")
try:
val = float(datapoint[1])
except:
continue
if len(datapoint[0].split(".")) == 2:
# If the metric name is the same as the subfolder
# double it so it's visible.
name = ".".join([datapoint[0], datapoint[0].split(".")[1]])
else:
name = datapoint[0]
self.publish(name, val)
|
|
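The collector parses upsc's "name: value" lines and keeps only numeric values; the same logic isolated as a standalone, testable function (sketch; the sample lines follow NUT's documented output format):

def parse_upsc(output):
    """Yield (metric_name, float_value) pairs from upsc output."""
    for ln in output.splitlines():
        parts = ln.split(": ", 1)
        if len(parts) != 2:
            continue
        try:
            yield parts[0], float(parts[1])
        except ValueError:
            continue  # skip non-numeric values such as 'ups.status: OL'

sample = "battery.charge: 100\nups.status: OL\ninput.voltage: 230.0"
assert dict(parse_upsc(sample)) == {"battery.charge": 100.0,
                                    "input.voltage": 230.0}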
115e280a00bbd64a97315c27c183756b2220790a
|
scripts/Reader_RDM6300.py
|
scripts/Reader_RDM6300.py
|
"""
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module, install "pyserial" instead and the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py
"""
import RPi.GPIO as GPIO
import serial
import string
class Reader:
def __init__(self):
GPIO.setmode(GPIO.BCM)
self.rfid_serial = serial.Serial('/dev/ttyS0', 9600)
def readCard(self):
while True:
card_id = ''
read_byte = self.rfid_serial.read()
if read_byte == b'\x02':
while read_byte != b'\x03':
read_byte = self.rfid_serial.read()
card_id += read_byte.decode('utf-8')
card_id = ''.join(x for x in card_id if x in string.printable)
return card_id
|
Support for the RDM6300 serial RFID module
|
Support for the RDM6300 serial RFID module
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module, install "pyserial" instead and the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py
|
Python
|
mit
|
MiczFlor/RPi-Jukebox-RFID,MiczFlor/RPi-Jukebox-RFID,MiczFlor/RPi-Jukebox-RFID,MiczFlor/RPi-Jukebox-RFID,MiczFlor/RPi-Jukebox-RFID
|
Support for the RDM6300 serial RFID module
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module, install "pyserial" instead and the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py
|
"""
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module, install "pyserial" instead and the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py
"""
import RPi.GPIO as GPIO
import serial
import string
class Reader:
def __init__(self):
GPIO.setmode(GPIO.BCM)
self.rfid_serial = serial.Serial('/dev/ttyS0', 9600)
def readCard(self):
while True:
card_id = ''
read_byte = self.rfid_serial.read()
if read_byte == b'\x02':
while read_byte != b'\x03':
read_byte = self.rfid_serial.read()
card_id += read_byte.decode('utf-8')
card_id = ''.join(x for x in card_id if x in string.printable)
return card_id
|
<commit_before><commit_msg>Support for the RDM6300 serial RFID module
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module, install "pyserial" instead and the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py<commit_after>
|
"""
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module, install "pyserial" instead and the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py
"""
import RPi.GPIO as GPIO
import serial
import string
class Reader:
def __init__(self):
GPIO.setmode(GPIO.BCM)
self.rfid_serial = serial.Serial('/dev/ttyS0', 9600)
def readCard(self):
while True:
card_id = ''
read_byte = self.rfid_serial.read()
if read_byte == b'\x02':
while read_byte != b'\x03':
read_byte = self.rfid_serial.read()
card_id += read_byte.decode('utf-8')
card_id = ''.join(x for x in card_id if x in string.printable)
return card_id
|
Support for the RDM6300 serial RFID module
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module, install "pyserial" instead and the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py"""
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module; install "pyserial" instead, along with the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py
"""
import RPi.GPIO as GPIO
import serial
import string
class Reader:
def __init__(self):
GPIO.setmode(GPIO.BCM)
self.rfid_serial = serial.Serial('/dev/ttyS0', 9600)
def readCard(self):
while True:
card_id = ''
read_byte = self.rfid_serial.read()
if read_byte == b'\x02':
while read_byte != b'\x03':
read_byte = self.rfid_serial.read()
card_id += read_byte.decode('utf-8')
card_id = ''.join(x for x in card_id if x in string.printable)
return card_id
|
<commit_before><commit_msg>Support for the RDM6300 serial RFID module
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module; install "pyserial" instead, along with the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py<commit_after>"""
Support for the RDM6300 serial RFID module
1.) Connect the RDM6300 module
------------------------------
Connect the RDM6300 module to the serial GPIO pins 14 and 15.
2.) Enable GPIO serial port
---------------------------
Edit the /boot/config.txt (sudo nano /boot/config.txt) and add the following line:
enable_uart=1
3.) Install dependencies
-----------------------
Be aware not to install the "serial" module; install "pyserial" instead, along with the RPi.GPIO module:
pip install pyserial RPi.GPIO
4.) Replace the default Reader.py
---------------------------------
Replace the Reader.py file with the Reader_RDM6300.py:
mv Reader.py Reader_default.py; mv Reader_RDM6300.py Reader.py
"""
import RPi.GPIO as GPIO
import serial
import string
class Reader:
def __init__(self):
GPIO.setmode(GPIO.BCM)
self.rfid_serial = serial.Serial('/dev/ttyS0', 9600)
def readCard(self):
while True:
card_id = ''
read_byte = self.rfid_serial.read()
if read_byte == b'\x02':
while read_byte != b'\x03':
read_byte = self.rfid_serial.read()
card_id += read_byte.decode('utf-8')
card_id = ''.join(x for x in card_id if x in string.printable)
return card_id
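A minimal usage sketch for the Reader class above, assuming the wiring and serial setup from the numbered steps in this record; the loop and the printed label are illustrative, not part of the commit:

from Reader import Reader

reader = Reader()
while True:
    # readCard() blocks until a complete frame (0x02 ... 0x03) arrives on /dev/ttyS0
    card_id = reader.readCard()
    print('Scanned card: {}'.format(card_id))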
|
|
e382e25f47533fad12583e5a7e1213381a92751b
|
CsvToSepaDD.py
|
CsvToSepaDD.py
|
#!/usr/bin/env python
import argparse
import pprint
DEFAULT_CONFIG_FILE_NAME = 'CsvToSepaDD.config'
DEFAULT_CURRENCY = 'EUR'
def csvToSepa(args):
'''
[TODO] Converts the SEPA direct debit data from a given CSV file to SEPA XML
'''
pass
def createConfig(args):
'''Interactively creates a configuation file'''
fileName = raw_input('configuration file name [%s]: ' % DEFAULT_CONFIG_FILE_NAME)
if not fileName:
fileName = DEFAULT_CONFIG_FILE_NAME
name = raw_input('your name: ')
iban = raw_input('your IBAN: ')
bic = raw_input('your BIC: ')
creditorId = raw_input('your creditor id: ')
currency = raw_input('your currency [%s]: ' % DEFAULT_CURRENCY)
if not currency:
currency = DEFAULT_CURRENCY
config = {
'name': name,
'iban': iban,
'bic': bic,
'creditor_id': creditorId,
'currency': currency,
}
with open(fileName, 'w') as f:
pprint.pprint(config, stream=f, indent=4)
print 'Configuration written to %s. ' \
'You can edit this file with a text ' \
'editor if you need to change something later.' % fileName
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create SEPA XML direct debit files from CSV')
subparsers = parser.add_subparsers()
genConfigParser = subparsers.add_parser('genconfig', help='generate a configuration file')
genConfigParser.set_defaults(func=createConfig)
genConfigParser.add_argument('config', help='name of the configuration file')
convertParser = subparsers.add_parser('convert', help='convert a CSV file to a SEPA XML file')
convertParser.set_defaults(func=csvToSepa)
convertParser.add_argument('config', help='configuration file to use')
convertParser.add_argument('input', help='input file or - for stdin')
convertParser.add_argument('output', help='output file or - for stdout')
args = parser.parse_args()
args.func(args)
|
Add a first draft for the user interface
|
Add a first draft for the user interface
|
Python
|
bsd-3-clause
|
mfiedler/CsvToSepaDD
|
Add a first draft for the user interface
|
#!/usr/bin/env python
import argparse
import pprint
DEFAULT_CONFIG_FILE_NAME = 'CsvToSepaDD.config'
DEFAULT_CURRENCY = 'EUR'
def csvToSepa(args):
'''
[TODO] Converts the SEPA direct debit data from a given CSV file to SEPA XML
'''
pass
def createConfig(args):
'''Interactively creates a configuration file'''
fileName = raw_input('configuration file name [%s]: ' % DEFAULT_CONFIG_FILE_NAME)
if not fileName:
fileName = DEFAULT_CONFIG_FILE_NAME
name = raw_input('your name: ')
iban = raw_input('your IBAN: ')
bic = raw_input('your BIC: ')
creditorId = raw_input('your creditor id: ')
currency = raw_input('your currency [%s]: ' % DEFAULT_CURRENCY)
if not currency:
currency = DEFAULT_CURRENCY
config = {
'name': name,
'iban': iban,
'bic': bic,
'creditor_id': creditorId,
'currency': currency,
}
with open(fileName, 'w') as f:
pprint.pprint(config, stream=f, indent=4)
print 'Configuration written to %s. ' \
'You can edit this file with a text ' \
'editor if you need to change something later.' % fileName
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create SEPA XML direct debit files from CSV')
subparsers = parser.add_subparsers()
genConfigParser = subparsers.add_parser('genconfig', help='generate a configuration file')
genConfigParser.set_defaults(func=createConfig)
genConfigParser.add_argument('config', help='name of the configuration file')
convertParser = subparsers.add_parser('convert', help='convert a CSV file to a SEPA XML file')
convertParser.set_defaults(func=csvToSepa)
convertParser.add_argument('config', help='configuration file to use')
convertParser.add_argument('input', help='input file or - for stdin')
convertParser.add_argument('output', help='output file or - for stdout')
args = parser.parse_args()
args.func(args)
|
<commit_before><commit_msg>Add a first draft for the user interface<commit_after>
|
#!/usr/bin/env python
import argparse
import pprint
DEFAULT_CONFIG_FILE_NAME = 'CsvToSepaDD.config'
DEFAULT_CURRENCY = 'EUR'
def csvToSepa(args):
'''
[TODO] Converts the SEPA direct debit data from a given CSV file to SEPA XML
'''
pass
def createConfig(args):
'''Interactively creates a configuration file'''
fileName = raw_input('configuration file name [%s]: ' % DEFAULT_CONFIG_FILE_NAME)
if not fileName:
fileName = DEFAULT_CONFIG_FILE_NAME
name = raw_input('your name: ')
iban = raw_input('your IBAN: ')
bic = raw_input('your BIC: ')
creditorId = raw_input('your creditor id: ')
currency = raw_input('your currency [%s]: ' % DEFAULT_CURRENCY)
if not currency:
currency = DEFAULT_CURRENCY
config = {
'name': name,
'iban': iban,
'bic': bic,
'creditor_id': creditorId,
'currency': currency,
}
with open(fileName, 'w') as f:
pprint.pprint(config, stream=f, indent=4)
print 'Configuration written to %s. ' \
'You can edit this file with a text ' \
'editor if you need to change something later.' % fileName
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create SEPA XML direct debit files from CSV')
subparsers = parser.add_subparsers()
genConfigParser = subparsers.add_parser('genconfig', help='generate a configuration file')
genConfigParser.set_defaults(func=createConfig)
genConfigParser.add_argument('config', help='name of the configuration file')
convertParser = subparsers.add_parser('convert', help='convert a CSV file to a SEPA XML file')
convertParser.set_defaults(func=csvToSepa)
convertParser.add_argument('config', help='configuration file to use')
convertParser.add_argument('input', help='input file or - for stdin')
convertParser.add_argument('output', help='output file or - for stdout')
args = parser.parse_args()
args.func(args)
|
Add a first draft for the user interface#!/usr/bin/env python
import argparse
import pprint
DEFAULT_CONFIG_FILE_NAME = 'CsvToSepaDD.config'
DEFAULT_CURRENCY = 'EUR'
def csvToSepa(args):
'''
[TODO] Converts the SEPA direct debit data from a given CSV file to SEPA XML
'''
pass
def createConfig(args):
'''Interactively creates a configuation file'''
fileName = raw_input('configuration file name [%s]: ' % DEFAULT_CONFIG_FILE_NAME)
if not fileName:
fileName = DEFAULT_CONFIG_FILE_NAME
name = raw_input('your name: ')
iban = raw_input('your IBAN: ')
bic = raw_input('your BIC: ')
creditorId = raw_input('your creditor id: ')
currency = raw_input('your currency [%s]: ' % DEFAULT_CURRENCY)
if not currency:
currency = DEFAULT_CURRENCY
config = {
'name': name,
'iban': iban,
'bic': bic,
'creditor_id': creditorId,
'currency': currency,
}
with open(fileName, 'w') as f:
pprint.pprint(config, stream=f, indent=4)
print 'Configuration written to %s. ' \
'You can edit this file with a text ' \
'editor if you need to change something later.' % fileName
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create SEPA XML direct debit files from CSV')
subparsers = parser.add_subparsers()
genConfigParser = subparsers.add_parser('genconfig', help='generate a configuration file')
genConfigParser.set_defaults(func=createConfig)
genConfigParser.add_argument('config', help='name of the configuration file')
convertParser = subparsers.add_parser('convert', help='convert a CSV file to a SEPA XML file')
convertParser.set_defaults(func=csvToSepa)
convertParser.add_argument('config', help='configuration file to use')
convertParser.add_argument('input', help='input file or - for stdin')
convertParser.add_argument('output', help='output file or - for stdout')
args = parser.parse_args()
args.func(args)
|
<commit_before><commit_msg>Add a first draft for the user interface<commit_after>#!/usr/bin/env python
import argparse
import pprint
DEFAULT_CONFIG_FILE_NAME = 'CsvToSepaDD.config'
DEFAULT_CURRENCY = 'EUR'
def csvToSepa(args):
'''
[TODO] Converts the SEPA direct debit data from a given CSV file to SEPA XML
'''
pass
def createConfig(args):
'''Interactively creates a configuration file'''
fileName = raw_input('configuration file name [%s]: ' % DEFAULT_CONFIG_FILE_NAME)
if not fileName:
fileName = DEFAULT_CONFIG_FILE_NAME
name = raw_input('your name: ')
iban = raw_input('your IBAN: ')
bic = raw_input('your BIC: ')
creditorId = raw_input('your creditor id: ')
currency = raw_input('your currency [%s]: ' % DEFAULT_CURRENCY)
if not currency:
currency = DEFAULT_CURRENCY
config = {
'name': name,
'iban': iban,
'bic': bic,
'creditor_id': creditorId,
'currency': currency,
}
with open(fileName, 'w') as f:
pprint.pprint(config, stream=f, indent=4)
print 'Configuration written to %s. ' \
'You can edit this file with a text ' \
'editor if you need to change something later.' % fileName
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create SEPA XML direct debit files from CSV')
subparsers = parser.add_subparsers()
genConfigParser = subparsers.add_parser('genconfig', help='generate a configuration file')
genConfigParser.set_defaults(func=createConfig)
genConfigParser.add_argument('config', help='name of the configuration file')
convertParser = subparsers.add_parser('convert', help='convert a CSV file to a SEPA XML file')
convertParser.set_defaults(func=csvToSepa)
convertParser.add_argument('config', help='configuration file to use')
convertParser.add_argument('input', help='input file or - for stdin')
convertParser.add_argument('output', help='output file or - for stdout')
args = parser.parse_args()
args.func(args)
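One way to exercise the argparse wiring above without a shell is to hand parse_args an explicit argv; the sketch below simulates "CsvToSepaDD.py convert my.config in.csv out.xml" with made-up file names and a stub in place of the still-unimplemented csvToSepa:

import argparse

def fake_convert(args):
    # Stand-in for csvToSepa(), which is still a TODO in the draft above
    print('would convert {} -> {} using {}'.format(args.input, args.output, args.config))

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
convert = subparsers.add_parser('convert')
convert.set_defaults(func=fake_convert)
convert.add_argument('config')
convert.add_argument('input')
convert.add_argument('output')

# Simulated command line: CsvToSepaDD.py convert my.config in.csv out.xml
args = parser.parse_args(['convert', 'my.config', 'in.csv', 'out.xml'])
args.func(args)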
|
|
573878b5609fb6badae40e4a1bdef944b9d340dc
|
tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py
|
tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'), '--video', '--file',
self._output_path], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.adb.device()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
Fix screen recording with multiple connected devices
|
telemetry: Fix screen recording with multiple connected devices
Make it possible to use the Android screen recording profiler with
multiple connected devices. Only the screen on the device that is
actually running the telemetry test will get recorded.
BUG=331435
TEST=tools/perf/run_benchmark smoothness.key_mobile_sites \
--page-filter=linus --browser=android-content-shell \
--profiler=android-screen-recorder
NOTRY=true
Review URL: https://codereview.chromium.org/119323008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@243080 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,anirudhSK/chromium,anirudhSK/chromium,ChromiumWebApps/chromium,M4sse/chromium.src,ondra-novak/chromium.src,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,dednal/chromium.src,dednal/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,anirudhSK/chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,Just-D/chromium-1,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,ChromiumWebApps/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,littlstar/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,jaruba/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,jaruba/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,patrickm/chromium.src,Chilledheart/chromium,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,M4sse/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,jaruba/chromium.src,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,anirudhSK/chromium,M4sse/chromium.src,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,markYoungH/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,ltilve/chromium,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,Chilledheart/chromium,dednal/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,anirudhSK/chromium,dushu1203/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,jaruba/chromium.src,ltilve/chromium,dednal/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'), '--video', '--file',
self._output_path], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
telemetry: Fix screen recording with multiple connected devices
Make it possible to use the Android screen recording profiler with
multiple connected devices. Only the screen on the device that is
actually running the telemetry test will get recorded.
BUG=331435
TEST=tools/perf/run_benchmark smoothness.key_mobile_sites \
--page-filter=linus --browser=android-content-shell \
--profiler=android-screen-recorder
NOTRY=true
Review URL: https://codereview.chromium.org/119323008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@243080 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.adb.device()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'), '--video', '--file',
self._output_path], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
<commit_msg>telemetry: Fix screen recording with multiple connected devices
Make it possible to use the Android screen recording profiler with
multiple connected devices. Only the screen on the device that is
actually running the telemetry test will get recorded.
BUG=331435
TEST=tools/perf/run_benchmark smoothness.key_mobile_sites \
--page-filter=linus --browser=android-content-shell \
--profiler=android-screen-recorder
NOTRY=true
Review URL: https://codereview.chromium.org/119323008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@243080 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.adb.device()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'), '--video', '--file',
self._output_path], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
telemetry: Fix screen recording with multiple connected devices
Make it possible to use the Android screen recording profiler with
multiple connected devices. Only the screen on the device that is
actually running the telemetry test will get recorded.
BUG=331435
TEST=tools/perf/run_benchmark smoothness.key_mobile_sites \
--page-filter=linus --browser=android-content-shell \
--profiler=android-screen-recorder
NOTRY=true
Review URL: https://codereview.chromium.org/119323008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@243080 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.adb.device()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'), '--video', '--file',
self._output_path], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
<commit_msg>telemetry: Fix screen recording with multiple connected devices
Make it possible to use the Android screen recording profiler with
multiple connected devices. Only the screen on the device that is
actually running the telemetry test will get recorded.
BUG=331435
TEST=tools/perf/run_benchmark smoothness.key_mobile_sites \
--page-filter=linus --browser=android-content-shell \
--profiler=android-screen-recorder
NOTRY=true
Review URL: https://codereview.chromium.org/119323008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@243080 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.core.platform import profiler
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend, output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.adb.device()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
self._recorder.wait()
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
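For context, the sketch below spells out the command line the fixed profiler now builds; the source directory, output path, and device serial are placeholders standing in for util.GetChromiumSrcDir(), the profiler's output path, and browser_backend.adb.device():

import os

chromium_src = '/path/to/chromium/src'   # placeholder for util.GetChromiumSrcDir()
output_path = '/tmp/profile.mp4'         # placeholder for the profiler's output path
device_serial = '0123456789ABCDEF'       # placeholder for browser_backend.adb.device()

cmd = [
    os.path.join(chromium_src, 'build', 'android', 'screenshot.py'),
    '--video',
    '--file', output_path,
    '--device', device_serial,  # the new flag: pins recording to the device under test
]
print(' '.join(cmd))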
|
301169a141b56a1a74b0c668e2b33083d593acdd
|
app/models/baseModel.py
|
app/models/baseModel.py
|
from app import db
class BaseModel(db.Model):
''' A model detailing the base properties to be inherited '''
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
date_modified = db.Column(
db.DateTime, default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp())
def delete(self):
'''delete data from database'''
db.session.delete(self)
db.session.commit()
def __init__(self, name):
'''initialize with name'''
self.name = name
def save(self):
'''save data to database'''
db.session.add(self)
db.session.commit()
@staticmethod
def __item_exists():
pass
|
Add Base Model This model is to be used by other model classes to inherit some traits
|
Add Base Model
This model is to be used by other model classes to inherit some traits
|
Python
|
mit
|
Elbertbiggs360/buckelist-api
|
Add Base Model
This model is to be used by other model classes to inherit some traits
|
from app import db
class BaseModel(db.Model):
''' A model detailing the base properties to be inherited '''
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
date_modified = db.Column(
db.DateTime, default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp())
def delete(self):
'''delete data from database'''
db.session.delete(self)
db.session.commit()
def __init__(self, name):
'''initialize with name'''
self.name = name
def save(self):
'''save data to database'''
db.session.add(self)
db.session.commit()
@staticmethod
def __item_exists():
pass
|
<commit_before><commit_msg>Add Base Model
This model is to be used by other model classes to inherit some traits<commit_after>
|
from app import db
class BaseModel(db.Model):
''' A model detailing the base properties to be inherited '''
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
date_modified = db.Column(
db.DateTime, default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp())
def delete(self):
'''delete data from database'''
db.session.delete(self)
db.session.commit()
def __init__(self, name):
'''initialize with name'''
self.name = name
def save(self):
'''save data to database'''
db.session.add(self)
db.session.commit()
@staticmethod
def __item_exists():
pass
|
Add Base Model
This model is to be used by other model classes to inherit some traitsfrom app import db
class BaseModel(db.Model):
''' A model detailing the base properties to be inherited '''
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
date_modified = db.Column(
db.DateTime, default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp())
def delete(self):
'''delete data from database'''
db.session.delete(self)
db.session.commit()
def __init__(self, name):
'''initialize with name'''
self.name = name
def save(self):
'''save data to database'''
db.session.add(self)
db.session.commit()
@staticmethod
def __item_exists():
pass
|
<commit_before><commit_msg>Add Base Model
This model is to be used by other model classes to inherit some traits<commit_after>from app import db
class BaseModel(db.Model):
''' A model detailing the base properties to be inherited '''
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
date_modified = db.Column(
db.DateTime, default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp())
def delete(self):
'''delete data from database'''
db.session.delete(self)
db.session.commit()
def __init__(self, name):
'''initialize with name'''
self.name = name
def save(self):
'''save data to database'''
db.session.add(self)
db.session.commit()
@staticmethod
def __item_exists():
pass
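A hypothetical subclass to illustrate what inheriting from BaseModel provides; the model name and column are invented, the import path assumes the file layout above, and a configured Flask app and database are assumed:

from app import db
from app.models.baseModel import BaseModel

class Bucketlist(BaseModel):
    """Concrete model: inherits id, timestamps, save() and delete() from BaseModel."""
    __tablename__ = 'bucketlists'
    name = db.Column(db.String(255), nullable=False)

item = Bucketlist('Learn Flask')  # BaseModel.__init__ stores the name
item.save()                       # inherited: db.session.add + commit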
|
|
cf3c624d5f5224c22f018c683dfc12bdc7b2a348
|
rename_file.py
|
rename_file.py
|
import sublime
import sublime_plugin
import os
import functools
class RenameFileCommand(sublime_plugin.WindowCommand):
def run(self, paths):
if paths[0] == "$file":
paths[0] = self.window.active_view().file_name()
branch, leaf = os.path.split(paths[0])
v = self.window.show_input_panel("New Name:", leaf, functools.partial(self.on_done, paths[0], branch), None, None)
name, ext = os.path.splitext(leaf)
v.sel().clear()
v.sel().add(sublime.Region(0, len(name)))
def on_done(self, old, branch, leaf):
new = os.path.join(branch, leaf)
try:
os.rename(old, new)
v = self.window.find_open_file(old)
if v:
v.retarget(new)
except:
sublime.status_message("Unable to rename")
def is_visible(self, paths):
return len(paths) == 1
|
Add Initial version of plugin.
|
Add Initial version of plugin.
|
Python
|
mit
|
ishu3101/RenameFile
|
Add Initial version of plugin.
|
import sublime
import sublime_plugin
import os
import functools
class RenameFileCommand(sublime_plugin.WindowCommand):
def run(self, paths):
if paths[0] == "$file":
paths[0] = self.window.active_view().file_name()
branch, leaf = os.path.split(paths[0])
v = self.window.show_input_panel("New Name:", leaf, functools.partial(self.on_done, paths[0], branch), None, None)
name, ext = os.path.splitext(leaf)
v.sel().clear()
v.sel().add(sublime.Region(0, len(name)))
def on_done(self, old, branch, leaf):
new = os.path.join(branch, leaf)
try:
os.rename(old, new)
v = self.window.find_open_file(old)
if v:
v.retarget(new)
except:
sublime.status_message("Unable to rename")
def is_visible(self, paths):
return len(paths) == 1
|
<commit_before><commit_msg>Add Initial version of plugin.<commit_after>
|
import sublime
import sublime_plugin
import os
import functools
class RenameFileCommand(sublime_plugin.WindowCommand):
def run(self, paths):
if paths[0] == "$file":
paths[0] = self.window.active_view().file_name()
branch, leaf = os.path.split(paths[0])
v = self.window.show_input_panel("New Name:", leaf, functools.partial(self.on_done, paths[0], branch), None, None)
name, ext = os.path.splitext(leaf)
v.sel().clear()
v.sel().add(sublime.Region(0, len(name)))
def on_done(self, old, branch, leaf):
new = os.path.join(branch, leaf)
try:
os.rename(old, new)
v = self.window.find_open_file(old)
if v:
v.retarget(new)
except:
sublime.status_message("Unable to rename")
def is_visible(self, paths):
return len(paths) == 1
|
Add Initial version of plugin.import sublime
import sublime_plugin
import os
import functools
class RenameFileCommand(sublime_plugin.WindowCommand):
def run(self, paths):
if paths[0] == "$file":
paths[0] = self.window.active_view().file_name()
branch, leaf = os.path.split(paths[0])
v = self.window.show_input_panel("New Name:", leaf, functools.partial(self.on_done, paths[0], branch), None, None)
name, ext = os.path.splitext(leaf)
v.sel().clear()
v.sel().add(sublime.Region(0, len(name)))
def on_done(self, old, branch, leaf):
new = os.path.join(branch, leaf)
try:
os.rename(old, new)
v = self.window.find_open_file(old)
if v:
v.retarget(new)
except:
sublime.status_message("Unable to rename")
def is_visible(self, paths):
return len(paths) == 1
|
<commit_before><commit_msg>Add Initial version of plugin.<commit_after>import sublime
import sublime_plugin
import os
import functools
class RenameFileCommand(sublime_plugin.WindowCommand):
def run(self, paths):
if paths[0] == "$file":
paths[0] = self.window.active_view().file_name()
branch, leaf = os.path.split(paths[0])
v = self.window.show_input_panel("New Name:", leaf, functools.partial(self.on_done, paths[0], branch), None, None)
name, ext = os.path.splitext(leaf)
v.sel().clear()
v.sel().add(sublime.Region(0, len(name)))
def on_done(self, old, branch, leaf):
new = os.path.join(branch, leaf)
try:
os.rename(old, new)
v = self.window.find_open_file(old)
if v:
v.retarget(new)
except:
sublime.status_message("Unable to rename")
def is_visible(self, paths):
return len(paths) == 1
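One way to drive the command is from the Sublime Text console, where a window object is predefined; passing "$file" (handled at the top of run above) targets the file in the active view. An illustrative call, not part of the plugin:

# Run from Sublime's console (View > Show Console); `window` exists there.
window.run_command('rename_file', {'paths': ['$file']})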
|
|
219b600773f6bf93b2fc61edc837fa3e1af086dd
|
tools/let2def.py
|
tools/let2def.py
|
#!/usr/bin/env python3
#
# Usage: let2def.py prog.fut
#
# Replaces top-level 'let's with 'def'.
from subprocess import check_output
import sys
import re
import os
prog = sys.argv[1]
out = check_output(["futhark", "defs", prog]).decode('utf-8')
funlocs = set()
for line in out.split('\n'):
m = re.match('value [^ ]+ ([^:]+):([0-9]+):([0-9]+)', line)
if m and m[1] == prog:
funlocs.add((int(m[2]), int(m[3])))
f = open(prog, 'r+')
s = list(f.read())
line=1
col=0
n = len(s)
for i in range(n):
if s[i] == '\n':
line += 1
col = 0
elif s[i] == '\t':
# Futhark lexer assumes tabwidth=8 for source positions.
col += 1
while (col % 8 != 0):
col += 1
else:
col += 1
if s[i:i+3] == list('let') and (line,col) in funlocs:
s[i:i+3] = 'def'
f.seek(0)
f.truncate()
f.write(''.join(s))
|
Add tool for converting "let" to "def".
|
Add tool for converting "let" to "def".
|
Python
|
isc
|
HIPERFIT/futhark,HIPERFIT/futhark,diku-dk/futhark,HIPERFIT/futhark,diku-dk/futhark,diku-dk/futhark,diku-dk/futhark,diku-dk/futhark
|
Add tool for converting "let" to "def".
|
#!/usr/bin/env python3
#
# Usage: let2def.py prog.fut
#
# Replaces top-level 'let's with 'def'.
from subprocess import check_output
import sys
import re
import os
prog = sys.argv[1]
out = check_output(["futhark", "defs", prog]).decode('utf-8')
funlocs = set()
for line in out.split('\n'):
m = re.match('value [^ ]+ ([^:]+):([0-9]+):([0-9]+)', line)
if m and m[1] == prog:
funlocs.add((int(m[2]), int(m[3])))
f = open(prog, 'r+')
s = list(f.read())
line=1
col=0
n = len(s)
for i in range(n):
if s[i] == '\n':
line += 1
col = 0
elif s[i] == '\t':
# Futhark lexer assumes tabwidth=8 for source positions.
col += 1
while (col % 8 != 0):
col += 1
else:
col += 1
if s[i:i+3] == list('let') and (line,col) in funlocs:
s[i:i+3] = 'def'
f.seek(0)
f.truncate()
f.write(''.join(s))
|
<commit_before><commit_msg>Add tool for converting "let" to "def".<commit_after>
|
#!/usr/bin/env python3
#
# Usage: let2def.py prog.fut
#
# Replaces top-level 'let's with 'def'.
from subprocess import check_output
import sys
import re
import os
prog = sys.argv[1]
out = check_output(["futhark", "defs", prog]).decode('utf-8')
funlocs = set()
for line in out.split('\n'):
m = re.match('value [^ ]+ ([^:]+):([0-9]+):([0-9]+)', line)
if m and m[1] == prog:
funlocs.add((int(m[2]), int(m[3])))
f = open(prog, 'r+')
s = list(f.read())
line=1
col=0
n = len(s)
for i in range(n):
if s[i] == '\n':
line += 1
col = 0
elif s[i] == '\t':
# Futhark lexer assumes tabwidth=8 for source positions.
col += 1
while (col % 8 != 0):
col += 1
else:
col += 1
if s[i:i+3] == list('let') and (line,col) in funlocs:
s[i:i+3] = 'def'
f.seek(0)
f.truncate()
f.write(''.join(s))
|
Add tool for converting "let" to "def".#!/usr/bin/env python3
#
# Usage: let2def.py prog.fut
#
# Replaces top-level 'let's with 'def'.
from subprocess import check_output
import sys
import re
import os
prog = sys.argv[1]
out = check_output(["futhark", "defs", prog]).decode('utf-8')
funlocs = set()
for line in out.split('\n'):
m = re.match('value [^ ]+ ([^:]+):([0-9]+):([0-9]+)', line)
if m and m[1] == prog:
funlocs.add((int(m[2]), int(m[3])))
f = open(prog, 'r+')
s = list(f.read())
line=1
col=0
n = len(s)
for i in range(n):
if s[i] == '\n':
line += 1
col = 0
elif s[i] == '\t':
# Futhark lexer assumes tabwidth=8 for source positions.
col += 1
while (col % 8 != 0):
col += 1
else:
col += 1
if s[i:i+3] == list('let') and (line,col) in funlocs:
s[i:i+3] = 'def'
f.seek(0)
f.truncate()
f.write(''.join(s))
|
<commit_before><commit_msg>Add tool for converting "let" to "def".<commit_after>#!/usr/bin/env python3
#
# Usage: let2def.py prog.fut
#
# Replaces top-level 'let's with 'def'.
from subprocess import check_output
import sys
import re
import os
prog = sys.argv[1]
out = check_output(["futhark", "defs", prog]).decode('utf-8')
funlocs = set()
for line in out.split('\n'):
m = re.match('value [^ ]+ ([^:]+):([0-9]+):([0-9]+)', line)
if m and m[1] == prog:
funlocs.add((int(m[2]), int(m[3])))
f = open(prog, 'r+')
s = list(f.read())
line=1
col=0
n = len(s)
for i in range(n):
if s[i] == '\n':
line += 1
col = 0
elif s[i] == '\t':
# Futhark lexer assumes tabwidth=8 for source positions.
col += 1
while (col % 8 != 0):
col += 1
else:
col += 1
if s[i:i+3] == list('let') and (line,col) in funlocs:
s[i:i+3] = 'def'
f.seek(0)
f.truncate()
f.write(''.join(s))
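The subtle part above is the tab handling: Futhark source positions are reported as if a tab advances the column to the next multiple of 8. A small self-check of that rule, mirroring the loop above on illustrative prefixes:

def column_of_next_char(prefix):
    # col tracks the 1-based column of the last character processed,
    # with tabs rounded up to the next multiple of 8 (as in let2def.py).
    col = 0
    for ch in prefix:
        col += 1
        if ch == '\t':
            while col % 8 != 0:
                col += 1
    return col + 1

assert column_of_next_char('\t') == 9   # 'let' after one tab starts at column 9
assert column_of_next_char('  ') == 3   # 'let' after two spaces starts at column 3
print('tab handling matches let2def.py')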
|
|
a3a70348fb8eed7e63f68ea67246bf85c4eb0097
|
sal/urls.py
|
sal/urls.py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
|
Update password functions for Django 1.8
|
Update password functions for Django 1.8
|
Python
|
apache-2.0
|
chasetb/sal,chasetb/sal,macjustice/sal,sheagcraig/sal,erikng/sal,erikng/sal,chasetb/sal,salopensource/sal,macjustice/sal,salopensource/sal,macjustice/sal,sheagcraig/sal,erikng/sal,salopensource/sal,chasetb/sal,sheagcraig/sal,macjustice/sal,erikng/sal,sheagcraig/sal,salopensource/sal
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
Update password functions for Django 1.8
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
<commit_msg>Update password functions for Django 1.8<commit_after>
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
Update password functions for Django 1.8from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
<commit_msg>Update password functions for Django 1.8<commit_after>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^changepassword/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
url(r'^', include('server.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls'))
#url(r'^$', 'namer.views.index', name='home'),
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if 'config' in settings.INSTALLED_APPS:
config_pattern = patterns('',
url(r'^config/', include('config.urls'))
)
urlpatterns += config_pattern
|
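Why the commit recorded above adds name= to the password URLs: under Django 1.8 the built-in password_change view, when given no post_change_redirect, resolves its redirect by reversing the 'password_change_done' URL name, so an unnamed pattern raises NoReverseMatch after a successful change. A minimal sketch of that lookup (not part of the recorded commit; it assumes the URLconf shown above):

from django.core.urlresolvers import reverse  # moved to django.urls in Django >= 2.0

def password_done_target():
    # password_change falls back to reverse('password_change_done'),
    # which is exactly the name the commit above introduces.
    return reverse('password_change_done')  # -> '/changepassword/done/'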
c535c2d4bfd231b5999b3a974b24a91a76325300
|
utils/metrics.py
|
utils/metrics.py
|
"""Metrics to compute the model performance."""
import numpy as np
def dcg_score(y_true, y_pred, k=5):
"""Discounted Cumulative Gain (DCG) at rank K.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> dcg_score(y_true, y_pred)
"""
score = 0
for y_true_value, y_pred_array in zip(y_true, y_pred):
for i in xrange(min(len(y_pred_array), k)):
numerator = 2**(y_true_value == y_pred_array[i]) - 1
denominator = np.log2(i + 1 + 1)
score += numerator / denominator
return score
def ndcg_score(y_true, y_pred, k=5):
"""Normalized Discounted Cumulative Gain (NDCG) at rank K.
Normalized Discounted Cumulative Gain (NDCG) measures the performance of a
recommendation system based on the graded relevance of the recommended
entities. It varies from 0.0 to 1.0, with 1.0 representing the ideal
ranking of the entities.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> ndcg_score(y_true, y_pred)
"""
# TODO: Best score is always len(y_true)?
best = dcg_score(y_true, [[i] for i in y_true])
actual = dcg_score(y_true, y_pred, k)
return actual / best
print ndcg_score(['FR', 'GR'], [['FR', 'ES', 'PT'], ['US', 'GR']])
|
Add functions to measure performance
|
Add functions to measure performance
|
Python
|
mit
|
davidgasquez/kaggle-airbnb
|
Add functions to measure performance
|
"""Metrics to compute the model performance."""
import numpy as np
def dcg_score(y_true, y_pred, k=5):
"""Discounted Cumulative Gain (DCG) at rank K.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> dcg_score(y_true, y_pred)
"""
score = 0
for y_true_value, y_pred_array in zip(y_true, y_pred):
for i in xrange(min(len(y_pred_array), k)):
numerator = 2**(y_true_value == y_pred_array[i]) - 1
denominator = np.log2(i + 1 + 1)
score += numerator / denominator
return score
def ndcg_score(y_true, y_pred, k=5):
"""Normalized Discounted Cumulative Gain (NDCG) at rank K.
Normalized Discounted Cumulative Gain (NDCG) measures the performance of a
recommendation system based on the graded relevance of the recommended
entities. It varies from 0.0 to 1.0, with 1.0 representing the ideal
ranking of the entities.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> ndcg_score(y_true, y_pred)
"""
# TODO: Best score is always len(y_true)?
best = dcg_score(y_true, [[i] for i in y_true])
actual = dcg_score(y_true, y_pred, k)
return actual / best
print ndcg_score(['FR', 'GR'], [['FR', 'ES', 'PT'], ['US', 'GR']])
|
<commit_before><commit_msg>Add functions to measure performance<commit_after>
|
"""Metrics to compute the model performance."""
import numpy as np
def dcg_score(y_true, y_pred, k=5):
"""Discounted Cumulative Gain (DCG) at rank K.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> dcg_score(y_true, y_pred)
"""
score = 0
for y_true_value, y_pred_array in zip(y_true, y_pred):
for i in xrange(min(len(y_pred_array), k)):
numerator = 2**(y_true_value == y_pred_array[i]) - 1
denominator = np.log2(i + 1 + 1)
score += numerator / denominator
return score
def ndcg_score(y_true, y_pred, k=5):
"""Normalized Discounted Cumulative Gain (NDCG) at rank K.
Normalized Discounted Cumulative Gain (NDCG) measures the performance of a
recommendation system based on the graded relevance of the recommended
entities. It varies from 0.0 to 1.0, with 1.0 representing the ideal
ranking of the entities.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> ndcg_score(y_true, y_pred)
"""
# TODO: Best score is always len(y_true)?
best = dcg_score(y_true, [[i] for i in y_true])
actual = dcg_score(y_true, y_pred, k)
return actual / best
print ndcg_score(['FR', 'GR'], [['FR', 'ES', 'PT'], ['US', 'GR']])
|
Add functions to measure performance
"""Metrics to compute the model performance."""
import numpy as np
def dcg_score(y_true, y_pred, k=5):
"""Discounted Cumulative Gain (DCG) at rank K.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> dcg_score(y_true, y_pred)
"""
score = 0
for y_true_value, y_pred_array in zip(y_true, y_pred):
for i in xrange(min(len(y_pred_array), k)):
numerator = 2**(y_true_value == y_pred_array[i]) - 1
denominator = np.log2(i + 1 + 1)
score += numerator / denominator
return score
def ndcg_score(y_true, y_pred, k=5):
"""Normalized Discounted Cumulative Gain (NDCG) at rank K.
Normalized Discounted Cumulative Gain (NDCG) measures the performance of a
recommendation system based on the graded relevance of the recommended
entities. It varies from 0.0 to 1.0, with 1.0 representing the ideal
ranking of the entities.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> ndcg_score(y_true, y_pred)
"""
# TODO: Best score is always len(y_true)?
best = dcg_score(y_true, [[i] for i in y_true])
actual = dcg_score(y_true, y_pred, k)
return actual / best
print ndcg_score(['FR', 'GR'], [['FR', 'ES', 'PT'], ['US', 'GR']])
|
<commit_before><commit_msg>Add functions to measure performance<commit_after>"""Metrics to compute the model performance."""
import numpy as np
def dcg_score(y_true, y_pred, k=5):
"""Discounted Cumulative Gain (DCG) at rank K.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> dcg_score(y_true, y_pred)
"""
score = 0
for y_true_value, y_pred_array in zip(y_true, y_pred):
for i in xrange(min(len(y_pred_array), k)):
numerator = 2**(y_true_value == y_pred_array[i]) - 1
denominator = np.log2(i + 1 + 1)
score += numerator / denominator
return score
def ndcg_score(y_true, y_pred, k=5):
"""Normalized Discounted Cumulative Gain (NDCG) at rank K.
Normalized Discounted Cumulative Gain (NDCG) measures the performance of a
recommendation system based on the graded relevance of the recommended
entities. It varies from 0.0 to 1.0, with 1.0 representing the ideal
ranking of the entities.
Parameters
----------
y_true : 1d array-like, shape = [n_samples]
Ground truth (correct) labels.
y_pred : 2d array-like, shape = [n_samples, k]
Predicted scores.
k : int
Rank.
Returns
-------
score : float
Examples
--------
>>> y_true = ['FR', 'GR']
>>> y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
>>> ndcg_score(y_true, y_pred)
"""
# TODO: Best score is always len(y_true)?
best = dcg_score(y_true, [[i] for i in y_true])
actual = dcg_score(y_true, y_pred, k)
return actual / best
print ndcg_score(['FR', 'GR'], [['FR', 'ES', 'PT'], ['US', 'GR']])
|
|
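A hand check of the metric recorded above, using the docstrings' own example; it also answers the inline TODO: an ideal ranking puts every true label at rank 0, each hit contributing (2**1 - 1) / log2(0 + 2) = 1, so the best DCG is indeed len(y_true). Python 3 spelling; nothing below is part of the commit.

import numpy as np

# y_true = ['FR', 'GR'], y_pred = [['FR', 'ES', 'PT'], ['US', 'GR']]
hit_fr = (2**1 - 1) / np.log2(0 + 2)   # 'FR' found at rank 0 -> 1.0
hit_gr = (2**1 - 1) / np.log2(1 + 2)   # 'GR' found at rank 1 -> ~0.6309
best = 2.0                             # len(y_true), per the TODO above
print((hit_fr + hit_gr) / best)        # ~0.8155 = NDCG@5 for this input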
b390a4381d7c495f1fad91248194799f3325c3d5
|
billjobs/serializers.py
|
billjobs/serializers.py
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
|
Add serializer for User model
|
Add serializer for User model
|
Python
|
mit
|
ioO/billjobs
|
Add serializer for User model
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
|
<commit_before><commit_msg>Add serializer for User model<commit_after>
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
|
Add serializer for User model
from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
|
<commit_before><commit_msg>Add serializer for User model<commit_after>from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
|
|
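For context on the serializer recorded above: a HyperlinkedModelSerializer's 'url' field is resolved against a route, so the usual companion is a ModelViewSet registered on a router. The viewset below is an illustrative assumption, not part of the billjobs commit.

from django.contrib.auth.models import User
from rest_framework import routers, viewsets
from billjobs.serializers import UserSerializer

class UserViewSet(viewsets.ModelViewSet):
    # The serializer's 'url' field resolves against this viewset's routes.
    queryset = User.objects.all()
    serializer_class = UserSerializer

router = routers.DefaultRouter()
router.register(r'users', UserViewSet)  # include router.urls in the URLconf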
db863be10cca5d43e37cace88fd2a500f6ee96f8
|
rest_framework/authtoken/admin.py
|
rest_framework/authtoken/admin.py
|
from django.contrib import admin
from .models import Token
class TokenAdmin(admin.ModelAdmin):
list_display = ('key', 'user', 'created')
fields = ('user',)
ordering = ('-created',)
admin.site.register(Token, TokenAdmin)
|
Add a ModelAdmin for easy management of Tokens
|
Add a ModelAdmin for easy management of Tokens
|
Python
|
bsd-2-clause
|
simudream/django-rest-framework,xiaotangyuan/django-rest-framework,xiaotangyuan/django-rest-framework,wangpanjun/django-rest-framework,arpheno/django-rest-framework,antonyc/django-rest-framework,raphaelmerx/django-rest-framework,mgaitan/django-rest-framework,d0ugal/django-rest-framework,davesque/django-rest-framework,brandoncazander/django-rest-framework,werthen/django-rest-framework,rafaelang/django-rest-framework,thedrow/django-rest-framework-1,delinhabit/django-rest-framework,rhblind/django-rest-framework,tcroiset/django-rest-framework,antonyc/django-rest-framework,rafaelcaricio/django-rest-framework,alacritythief/django-rest-framework,vstoykov/django-rest-framework,andriy-s/django-rest-framework,VishvajitP/django-rest-framework,rhblind/django-rest-framework,waytai/django-rest-framework,James1345/django-rest-framework,linovia/django-rest-framework,brandoncazander/django-rest-framework,agconti/django-rest-framework,lubomir/django-rest-framework,kgeorgy/django-rest-framework,ezheidtmann/django-rest-framework,sehmaschine/django-rest-framework,abdulhaq-e/django-rest-framework,ambivalentno/django-rest-framework,HireAnEsquire/django-rest-framework,johnraz/django-rest-framework,dmwyatt/django-rest-framework,VishvajitP/django-rest-framework,kylefox/django-rest-framework,aericson/django-rest-framework,gregmuellegger/django-rest-framework,jerryhebert/django-rest-framework,leeahoward/django-rest-framework,rhblind/django-rest-framework,ebsaral/django-rest-framework,douwevandermeij/django-rest-framework,hunter007/django-rest-framework,James1345/django-rest-framework,James1345/django-rest-framework,brandoncazander/django-rest-framework,uploadcare/django-rest-framework,leeahoward/django-rest-framework,vstoykov/django-rest-framework,buptlsl/django-rest-framework,kezabelle/django-rest-framework,fishky/django-rest-framework,qsorix/django-rest-framework,potpath/django-rest-framework,edx/django-rest-framework,uruz/django-rest-framework,atombrella/django-rest-framework,tcroiset/django-rest-framework,pombredanne/django-rest-framework,cheif/django-rest-framework,kylefox/django-rest-framework,uploadcare/django-rest-framework,elim/django-rest-framework,waytai/django-rest-framework,callorico/django-rest-framework,nhorelik/django-rest-framework,jpulec/django-rest-framework,uploadcare/django-rest-framework,tcroiset/django-rest-framework,sheppard/django-rest-framework,sbellem/django-rest-framework,zeldalink0515/django-rest-framework,dmwyatt/django-rest-framework,hnakamur/django-rest-framework,wedaly/django-rest-framework,alacritythief/django-rest-framework,AlexandreProenca/django-rest-framework,AlexandreProenca/django-rest-framework,akalipetis/django-rest-framework,dmwyatt/django-rest-framework,callorico/django-rest-framework,canassa/django-rest-framework,ajaali/django-rest-framework,simudream/django-rest-framework,hnarayanan/django-rest-framework,abdulhaq-e/django-rest-framework,ossanna16/django-rest-framework,werthen/django-rest-framework,damycra/django-rest-framework,thedrow/django-rest-framework-1,wedaly/django-rest-framework,sehmaschine/django-rest-framework,jtiai/django-rest-framework,wangpanjun/django-rest-framework,kennydude/django-rest-framework,canassa/django-rest-framework,uruz/django-rest-framework,ebsaral/django-rest-framework,yiyocx/django-rest-framework,hnarayanan/django-rest-framework,kennydude/django-rest-framework,krinart/django-rest-framework,qsorix/django-rest-framework,paolopaolopaolo/django-rest-framework,ashishfinoit/django-rest-framework,ossanna16/django-rest-framework,johnraz/django-rest-fra
mework,adambain-vokal/django-rest-framework,linovia/django-rest-framework,d0ugal/django-rest-framework,uruz/django-rest-framework,akalipetis/django-rest-framework,kennydude/django-rest-framework,ezheidtmann/django-rest-framework,rafaelcaricio/django-rest-framework,YBJAY00000/django-rest-framework,tomchristie/django-rest-framework,agconti/django-rest-framework,d0ugal/django-rest-framework,rafaelcaricio/django-rest-framework,adambain-vokal/django-rest-framework,hnarayanan/django-rest-framework,nhorelik/django-rest-framework,delinhabit/django-rest-framework,sbellem/django-rest-framework,ajaali/django-rest-framework,potpath/django-rest-framework,simudream/django-rest-framework,rubendura/django-rest-framework,jerryhebert/django-rest-framework,wwj718/django-rest-framework,aericson/django-rest-framework,wzbozon/django-rest-framework,douwevandermeij/django-rest-framework,kezabelle/django-rest-framework,lubomir/django-rest-framework,nryoung/django-rest-framework,bluedazzle/django-rest-framework,edx/django-rest-framework,YBJAY00000/django-rest-framework,tomchristie/django-rest-framework,potpath/django-rest-framework,VishvajitP/django-rest-framework,waytai/django-rest-framework,yiyocx/django-rest-framework,tigeraniya/django-rest-framework,hnakamur/django-rest-framework,elim/django-rest-framework,jtiai/django-rest-framework,andriy-s/django-rest-framework,johnraz/django-rest-framework,mgaitan/django-rest-framework,buptlsl/django-rest-framework,wzbozon/django-rest-framework,nryoung/django-rest-framework,kylefox/django-rest-framework,wwj718/django-rest-framework,jness/django-rest-framework,wzbozon/django-rest-framework,sehmaschine/django-rest-framework,linovia/django-rest-framework,YBJAY00000/django-rest-framework,wangpanjun/django-rest-framework,cyberj/django-rest-framework,rubendura/django-rest-framework,paolopaolopaolo/django-rest-framework,andriy-s/django-rest-framework,nhorelik/django-rest-framework,HireAnEsquire/django-rest-framework,ambivalentno/django-rest-framework,zeldalink0515/django-rest-framework,jpadilla/django-rest-framework,aericson/django-rest-framework,callorico/django-rest-framework,iheitlager/django-rest-framework,mgaitan/django-rest-framework,sbellem/django-rest-framework,davesque/django-rest-framework,werthen/django-rest-framework,davesque/django-rest-framework,canassa/django-rest-framework,adambain-vokal/django-rest-framework,kgeorgy/django-rest-framework,pombredanne/django-rest-framework,leeahoward/django-rest-framework,atombrella/django-rest-framework,kezabelle/django-rest-framework,nryoung/django-rest-framework,ambivalentno/django-rest-framework,hnakamur/django-rest-framework,douwevandermeij/django-rest-framework,fishky/django-rest-framework,justanr/django-rest-framework,raphaelmerx/django-rest-framework,delinhabit/django-rest-framework,ashishfinoit/django-rest-framework,maryokhin/django-rest-framework,jtiai/django-rest-framework,elim/django-rest-framework,yiyocx/django-rest-framework,ebsaral/django-rest-framework,justanr/django-rest-framework,rafaelang/django-rest-framework,paolopaolopaolo/django-rest-framework,sheppard/django-rest-framework,maryokhin/django-rest-framework,zeldalink0515/django-rest-framework,agconti/django-rest-framework,cheif/django-rest-framework,akalipetis/django-rest-framework,arpheno/django-rest-framework,MJafarMashhadi/django-rest-framework,jness/django-rest-framework,damycra/django-rest-framework,tomchristie/django-rest-framework,jness/django-rest-framework,jpulec/django-rest-framework,rafaelang/django-rest-framework,edx/django-rest-framework,qsorix/djang
o-rest-framework,cyberj/django-rest-framework,ticosax/django-rest-framework,ticosax/django-rest-framework,jpadilla/django-rest-framework,krinart/django-rest-framework,bluedazzle/django-rest-framework,ossanna16/django-rest-framework,gregmuellegger/django-rest-framework,HireAnEsquire/django-rest-framework,hunter007/django-rest-framework,buptlsl/django-rest-framework,abdulhaq-e/django-rest-framework,AlexandreProenca/django-rest-framework,cheif/django-rest-framework,alacritythief/django-rest-framework,rubendura/django-rest-framework,kgeorgy/django-rest-framework,ashishfinoit/django-rest-framework,bluedazzle/django-rest-framework,wedaly/django-rest-framework,cyberj/django-rest-framework,arpheno/django-rest-framework,jerryhebert/django-rest-framework,vstoykov/django-rest-framework,tigeraniya/django-rest-framework,iheitlager/django-rest-framework,raphaelmerx/django-rest-framework,justanr/django-rest-framework,damycra/django-rest-framework,sheppard/django-rest-framework,xiaotangyuan/django-rest-framework,gregmuellegger/django-rest-framework,ezheidtmann/django-rest-framework,krinart/django-rest-framework,wwj718/django-rest-framework,pombredanne/django-rest-framework,ajaali/django-rest-framework,lubomir/django-rest-framework,atombrella/django-rest-framework,tigeraniya/django-rest-framework,hunter007/django-rest-framework,maryokhin/django-rest-framework,antonyc/django-rest-framework,jpulec/django-rest-framework,MJafarMashhadi/django-rest-framework,fishky/django-rest-framework,MJafarMashhadi/django-rest-framework,ticosax/django-rest-framework,thedrow/django-rest-framework-1,jpadilla/django-rest-framework,iheitlager/django-rest-framework
|
Add a ModelAdmin for easy management of Tokens
|
from django.contrib import admin
from .models import Token
class TokenAdmin(admin.ModelAdmin):
list_display = ('key', 'user', 'created')
fields = ('user',)
ordering = ('-created',)
admin.site.register(Token, TokenAdmin)
|
<commit_before><commit_msg>Add a ModelAdmin for easy management of Tokens<commit_after>
|
from django.contrib import admin
from .models import Token
class TokenAdmin(admin.ModelAdmin):
list_display = ('key', 'user', 'created')
fields = ('user',)
ordering = ('-created',)
admin.site.register(Token, TokenAdmin)
|
Add a ModelAdmin for easy management of Tokens
from django.contrib import admin
from .models import Token
class TokenAdmin(admin.ModelAdmin):
list_display = ('key', 'user', 'created')
fields = ('user',)
ordering = ('-created',)
admin.site.register(Token, TokenAdmin)
|
<commit_before><commit_msg>Add a ModelAdmin for easy management of Tokens<commit_after>from django.contrib import admin
from .models import Token
class TokenAdmin(admin.ModelAdmin):
list_display = ('key', 'user', 'created')
fields = ('user',)
ordering = ('-created',)
admin.site.register(Token, TokenAdmin)
|
|
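One detail of the admin recorded above worth spelling out: fields = ('user',) leaves the key off the add form because Token.save() generates it. The matching application-side idiom (a sketch, not in the commit) is one token per user, created lazily:

from rest_framework.authtoken.models import Token

def api_key_for(user):
    # Token.key is generated inside Token.save(), the same reason the
    # admin form above only asks for the user.
    token, _created = Token.objects.get_or_create(user=user)
    return token.key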
7bf0ba89a74b555555e43c0a38780d8f5fe69fcf
|
host/greatfet/boards/one.py
|
host/greatfet/boards/one.py
|
#
# Copyright (c) 2016 Kyle J. Temkin <kyle@ktemkin.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from ..board import GreatFETBoard
from ..peripherals.spi_flash import SPIFlash
from ..peripherals.i2c_bus import I2CBus
from ..peripherals import gpio
class GreatFETOne(GreatFETBoard):
""" Class representing GreatFET One base-boards. """
# Currently, all GreatFET One boards have an ID of zero.
HANDLED_BOARD_IDS = [0]
BOARD_NAME = "GreatFET One"
def __init__(self, **device_identifiers):
""" Initialize a new GreatFET One connection. """
# Set up the core connection.
super(GreatFETOne, self).__init__(**device_identifiers)
# Initialize the fixed peripherals that come on the board.
# TODO: Use a self.add_peripheral mechanism, so peripherals can
# be dynamically listed?
self.onboard_flash = SPIFlash(self)
self.i2c_busses = [ I2CBus(self, 'I2C0') ]
# Create an easy-to-use alias for the primary I2C bus, for rapid
# hacking/experimentation.
self.i2c = self.i2c_busses[0]
|
Rename Azalea to GreatFET One
|
Rename Azalea to GreatFET One
|
Python
|
bsd-3-clause
|
dominicgs/GreatFET-experimental,dominicgs/GreatFET-experimental,dominicgs/GreatFET-experimental,greatscottgadgets/greatfet,greatscottgadgets/greatfet,greatscottgadgets/greatfet,greatscottgadgets/greatfet
|
Rename Azalea to GreatFET One
|
#
# Copyright (c) 2016 Kyle J. Temkin <kyle@ktemkin.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from ..board import GreatFETBoard
from ..peripherals.spi_flash import SPIFlash
from ..peripherals.i2c_bus import I2CBus
from ..peripherals import gpio
class GreatFETOne(GreatFETBoard):
""" Class representing GreatFET One base-boards. """
# Currently, all GreatFET One boards have an ID of zero.
HANDLED_BOARD_IDS = [0]
BOARD_NAME = "GreatFET One"
def __init__(self, **device_identifiers):
""" Initialize a new GreatFET One connection. """
# Set up the core connection.
super(GreatFETOne, self).__init__(**device_identifiers)
# Initialize the fixed peripherals that come on the board.
# TODO: Use a self.add_peripheral mechanism, so peripherals can
# be dynamically listed?
self.onboard_flash = SPIFlash(self)
self.i2c_busses = [ I2CBus(self, 'I2C0') ]
# Create an easy-to-use alias for the primary I2C bus, for rapid
# hacking/experimentation.
self.i2c = self.i2c_busses[0]
|
<commit_before><commit_msg>Rename Azalea to GreatFET One<commit_after>
|
#
# Copyright (c) 2016 Kyle J. Temkin <kyle@ktemkin.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from ..board import GreatFETBoard
from ..peripherals.spi_flash import SPIFlash
from ..peripherals.i2c_bus import I2CBus
from ..peripherals import gpio
class GreatFETOne(GreatFETBoard):
""" Class representing GreatFET One base-boards. """
# Currently, all GreatFET One boards have an ID of zero.
HANDLED_BOARD_IDS = [0]
BOARD_NAME = "GreatFET One"
def __init__(self, **device_identifiers):
""" Initialize a new GreatFET One connection. """
# Set up the core connection.
super(GreatFETOne, self).__init__(**device_identifiers)
# Initialize the fixed peripherals that come on the board.
# TODO: Use a self.add_peripheral mechanism, so peripherals can
# be dynamically listed?
self.onboard_flash = SPIFlash(self)
self.i2c_busses = [ I2CBus(self, 'I2C0') ]
# Create an easy-to-use alias for the primary I2C bus, for rapid
# hacking/experimentation.
self.i2c = self.i2c_busses[0]
|
Rename Azalea to GreatFET One
#
# Copyright (c) 2016 Kyle J. Temkin <kyle@ktemkin.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from ..board import GreatFETBoard
from ..peripherals.spi_flash import SPIFlash
from ..peripherals.i2c_bus import I2CBus
from ..peripherals import gpio
class GreatFETOne(GreatFETBoard):
""" Class representing GreatFET One base-boards. """
# Currently, all GreatFET One boards have an ID of zero.
HANDLED_BOARD_IDS = [0]
BOARD_NAME = "GreatFET One"
def __init__(self, **device_identifiers):
""" Initialize a new GreatFET One connection. """
# Set up the core connection.
super(GreatFETOne, self).__init__(**device_identifiers)
# Initialize the fixed peripherals that come on the board.
# TODO: Use a self.add_peripheral mechanism, so peripherals can
# be dynamically listed?
self.onboard_flash = SPIFlash(self)
self.i2c_busses = [ I2CBus(self, 'I2C0') ]
# Create an easy-to-use alias for the primary I2C bus, for rapid
# hacking/experimentation.
self.i2c = self.i2c_busses[0]
|
<commit_before><commit_msg>Rename Azalea to GreatFET One<commit_after>#
# Copyright (c) 2016 Kyle J. Temkin <kyle@ktemkin.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from ..board import GreatFETBoard
from ..peripherals.spi_flash import SPIFlash
from ..peripherals.i2c_bus import I2CBus
from ..peripherals import gpio
class GreatFETOne(GreatFETBoard):
""" Class representing GreatFET One base-boards. """
# Currently, all GreatFET One boards have an ID of zero.
HANDLED_BOARD_IDS = [0]
BOARD_NAME = "GreatFET One"
def __init__(self, **device_identifiers):
""" Initialize a new GreatFET One connection. """
# Set up the core connection.
super(GreatFETOne, self).__init__(**device_identifiers)
# Initialize the fixed peripherals that come on the board.
# TODO: Use a self.add_peripheral mechanism, so peripherals can
# be dynamically listed?
self.onboard_flash = SPIFlash(self)
self.i2c_busses = [ I2CBus(self, 'I2C0') ]
# Create an easy-to-use alias for the primary I2C bus, for rapid
# hacking/experimentation.
self.i2c = self.i2c_busses[0]
|
|
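The HANDLED_BOARD_IDS attribute in the record above implies a dispatch step elsewhere: something enumerates the GreatFETBoard subclasses and picks the one claiming the connected device's board ID. The helper below sketches that pattern generically; its name and signature are assumptions, not the library's actual API.

def board_class_for(board_id, candidates):
    # candidates: the known GreatFETBoard subclasses, GreatFETOne included
    for cls in candidates:
        if board_id in cls.HANDLED_BOARD_IDS:  # GreatFET One claims ID 0
            return cls
    raise ValueError("no board class handles id %r" % board_id)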
530d1d75872df47a1dbd90b2b6cfd5ebac0fe4c8
|
badgecheck/server/app.py
|
badgecheck/server/app.py
|
from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
|
from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
def request_wants_json():
best = request.accept_mimetypes.best_match(['application/json', 'text/html'])
return best == 'application/json' and request.accept_mimetypes[best] > request.accept_mimetypes['text/html']
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['GET'])
def resultGetRedirect():
return redirect('/')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
if request_wants_json():
return (json.dumps(verification_results, indent=4), 200, {'Content-Type': 'application/json'},)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
|
Establish basic JSON API capability.
|
Establish basic JSON API capability.
|
Python
|
apache-2.0
|
concentricsky/badgecheck,openbadges/badgecheck,IMSGlobal/openbadges-validator-core,IMSGlobal/openbadges-validator-core,concentricsky/badgecheck,openbadges/badgecheck
|
from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
Establish basic JSON API capability.
|
from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
def request_wants_json():
best = request.accept_mimetypes.best_match(['application/json', 'text/html'])
return best == 'application/json' and request.accept_mimetypes[best] > request.accept_mimetypes['text/html']
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['GET'])
def resultGetRedirect():
return redirect('/')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
if request_wants_json():
return (json.dumps(verification_results, indent=4), 200, {'Content-Type': 'application/json'},)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
<commit_msg>Establish basic JSON API capability.<commit_after>
|
from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
def request_wants_json():
best = request.accept_mimetypes.best_match(['application/json', 'text/html'])
return best == 'application/json' and request.accept_mimetypes[best] > request.accept_mimetypes['text/html']
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['GET'])
def resultGetRedirect():
return redirect('/')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
if request_wants_json():
return (json.dumps(verification_results, indent=4), 200, {'Content-Type': 'application/json'},)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
|
from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
Establish basic JSON API capability.
from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
def request_wants_json():
best = request.accept_mimetypes.best_match(['application/json', 'text/html'])
return best == 'application/json' and request.accept_mimetypes[best] > request.accept_mimetypes['text/html']
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['GET'])
def resultGetRedirect():
return redirect('/')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
if request_wants_json():
return (json.dumps(verification_results, indent=4), 200, {'Content-Type': 'application/json'},)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
<commit_msg>Establish basic JSON API capability.<commit_after>from flask import Flask, redirect, render_template, request
import json
import six
from badgecheck.verifier import verify
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024 # 4mb file upload limit
def request_wants_json():
best = request.accept_mimetypes.best_match(['application/json', 'text/html'])
return best == 'application/json' and request.accept_mimetypes[best] > request.accept_mimetypes['text/html']
@app.route("/")
def home():
return render_template('index.html')
@app.route("/results", methods=['GET'])
def resultGetRedirect():
return redirect('/')
@app.route("/results", methods=['POST'])
def results():
if isinstance(request.form['data'], six.string_types) or request.files:
user_input = request.form['data']
if 'image' in request.files and len(request.files['image'].filename):
user_input = request.files['image']
verification_results = verify(user_input)
if request_wants_json():
return (json.dumps(verification_results, indent=4), 200, {'Content-Type': 'application/json'},)
return render_template(
'results.html', results=json.dumps(verification_results, indent=4))
return redirect('/')
if __name__ == "__main__":
app.run()
|
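The negotiation helper recorded above is easy to exercise with Flask's test client (illustrative; the assertion URL is a placeholder, and none of this is part of the commit). With Accept: application/json, the JSON type wins the best-match comparison against text/html (quality 1 vs 0), so the raw JSON body comes back instead of the rendered results page.

from badgecheck.server.app import app

client = app.test_client()
resp = client.post('/results',
                   data={'data': 'https://example.org/assertion.json'},
                   headers={'Accept': 'application/json'})
print(resp.content_type)  # expected: application/json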
82140baa69ff88c329ad500224d6dfa605867536
|
tests/smtp.py
|
tests/smtp.py
|
import smtplib
import sys
from email import mime
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
msg = mime.multipart.MIMEMultipart()
msg['Subject'] = 'Test email'
msg['From'] = sys.argv[1]
msg['To'] = sys.argv[2]
msg.preamble = 'Test email'
s = smtplib.SMTP('localhost')
s.set_debuglevel(1)
s.send_message(msg)
s.quit()
|
Add a very simple test script for sending mails
|
Add a very simple test script for sending mails
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
Add a very simple test script for sending mails
|
import smtplib
import sys
from email import mime
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
msg = mime.multipart.MIMEMultipart()
msg['Subject'] = 'Test email'
msg['From'] = sys.argv[1]
msg['To'] = sys.argv[2]
msg.preamble = 'Test email'
s = smtplib.SMTP('localhost')
s.set_debuglevel(1)
s.send_message(msg)
s.quit()
|
<commit_before><commit_msg>Add a very simple test script for sending mails<commit_after>
|
import smtplib
import sys
from email import mime
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
msg = mime.multipart.MIMEMultipart()
msg['Subject'] = 'Test email'
msg['From'] = sys.argv[1]
msg['To'] = sys.argv[2]
msg.preamble = 'Test email'
s = smtplib.SMTP('localhost')
s.set_debuglevel(1)
s.send_message(msg)
s.quit()
|
Add a very simple test script for sending mails
import smtplib
import sys
from email import mime
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
msg = mime.multipart.MIMEMultipart()
msg['Subject'] = 'Test email'
msg['From'] = sys.argv[1]
msg['To'] = sys.argv[2]
msg.preamble = 'Test email'
s = smtplib.SMTP('localhost')
s.set_debuglevel(1)
s.send_message(msg)
s.quit()
|
<commit_before><commit_msg>Add a very simple test script for sending mails<commit_after>import smtplib
import sys
from email import mime
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
msg = mime.multipart.MIMEMultipart()
msg['Subject'] = 'Test email'
msg['From'] = sys.argv[1]
msg['To'] = sys.argv[2]
msg.preamble = 'Test email'
s = smtplib.SMTP('localhost')
s.set_debuglevel(1)
s.send_message(msg)
s.quit()
|
|
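Two notes on the script recorded above: smtplib's send_message() exists only on Python 3 (added in 3.2), and MIMEImage is imported but never used. It runs as 'python tests/smtp.py sender@example.com recipient@example.com' (placeholder addresses); if an attachment were wanted, a hypothetical snippet like the following (placeholder file path) would slot in before send_message():

from email.mime.image import MIMEImage  # already imported by the script

with open('logo.png', 'rb') as fp:      # placeholder image path
    msg.attach(MIMEImage(fp.read()))    # 'msg' is the MIMEMultipart above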
5ae10de8c33b3388c0e593187be9fb62ac1f2c2c
|
django/setup.py
|
django/setup.py
|
import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
|
import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
# because pooling doesn't work with meinheld, it's necessary to create a ton of gunicorn threads (think apache pre-fork)
# to allow the OS to switch processes when waiting for socket I/O.
args.max_threads *= 8
# and go from there until the database server runs out of memory for new threads (connections)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
|
Use more gunicorn threads when a pooling database connector isn't available.
|
Use more gunicorn threads when a pooling database connector isn't available.
When using postgres with meinheld, the best you can do so far (as far as I know) is to increase the number of threads.
|
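Worked numbers for the scaling described in the message above (illustrative; assumes args.max_threads starts at the machine's core count, say 8):

max_threads = 8            # e.g. one per core, as passed in via args
max_threads *= 8           # the commit's pre-fork-style multiplier
workers = max_threads * 2  # the value handed to gunicorn -w  -> 128 processes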
Python
|
bsd-3-clause
|
jetty-project/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,testn/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,grob/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,khellang/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zloster/FrameworkBenchmarks,leafo/FrameworkBenchmarks,khellang/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zloster/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Verber/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jamming/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,grob/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sgml/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,valyala/FrameworkBenchmarks,testn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,doom369/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmark
s,ashawnbandy-te-tfb/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,testn/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zloster/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sgml/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,valyala/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sgml/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Verber/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zloster/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sxend/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,herloct/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zapov/FrameworkBenchmarks,methane/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sxend/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Verber/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jamming/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,khellang/FrameworkBenchmarks,testn/FrameworkBenchmarks,dmacd/FB-try1,doom369/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,stefanocasazza/Fram
eworkBenchmarks,zane-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,methane/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,actframework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,leafo/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,grob/FrameworkBenchmarks,Verber/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,testn/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,khellang/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,herloct/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zloster/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sgml/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Verber/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,grob/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zapov/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,methane/FrameworkBenchmarks,va
lyala/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,testn/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,grob/FrameworkBenchmarks,doom369/FrameworkBenchmarks,valyala/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,dmacd/FB-try1,psfblair/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sxend/FrameworkBenchmarks,leafo/FrameworkBenchmarks,herloct/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,dmacd/FB-try1,RockinRoel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sgml/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,joshk/FrameworkBenchmarks,methane/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,doom369/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,dmacd/FB-try1,markkolich/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,herloct/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,grob
/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,doom369/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zapov/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,testn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Verber/FrameworkBenchmarks,methane/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,khellang/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,testn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zloster/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,leafo/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jamming/FrameworkBenchmarks,dmacd/FB-try1,zhuochenKIDD/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,leafo/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Jesterovskiy/Fr
ameworkBenchmarks,kostya-sh/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,denkab/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,dmacd/FB-try1,joshk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,testn/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,torhve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sgml/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,torhve/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,dmacd/FB-try1,denkab/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,denkab/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/Fra
meworkBenchmarks,martin-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,testn/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,grob/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,herloct/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,leafo/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,methane/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,valyala/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,methane/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,grob/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,valyala/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,testn/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,joshk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jamming/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,torhve/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sgml/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,methane/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,herloct/FrameworkBenchmarks,denkab/FrameworkBenchmarks,denkab/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Verber/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,khellang/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Rydgel/
FrameworkBenchmarks,zapov/FrameworkBenchmarks,methane/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,torhve/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,dmacd/FB-try1,jetty-project/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sgml/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,joshk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,actframework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,denkab/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,torhve/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,methane/FrameworkBenchmarks,actframework/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,methane/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,leafo/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,dmacd/FB-try1,zhuochenKIDD/FrameworkBenchmarks,valyala/FrameworkBenchmarks,testn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sgml/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,leafo/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,herloct/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Ocramius/Fra
meworkBenchmarks,ratpack/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zloster/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,dmacd/FB-try1,Jesterovskiy/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,actframework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,joshk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Verber/FrameworkBenchmarks,actframework/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,grob/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,actframework/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,doom369/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,testn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,grob/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,doom369/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,dmacd/FB-try1,jamming/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,khellang/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,grob/FrameworkBenchmarks,sxend/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,leafo/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zloster/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,julienschmidt/Fram
eworkBenchmarks,jetty-project/FrameworkBenchmarks,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,dmacd/FB-try1,kbrock/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,leafo/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,joshk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks
|
import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
Use more gunicorn threads when a pooling database connector isn't available.
When using postgres with meinheld, the best you can do so far (as far as I know) is to up the number of threads.
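A rough sketch of the worker arithmetic this change implies (the 8x bump and the existing *2 are taken from the diff that follows; cpu_cores is a stand-in for args.max_threads, and the printed command is illustrative rather than the benchmark's exact invocation):

# Sizing meinheld workers when no pooled DB connector is available:
# each worker holds one blocking postgres connection, so concurrency
# comes from process count -- bounded by the database server's memory.
cpu_cores = 8                   # stand-in for args.max_threads
workers = cpu_cores * 8 * 2     # the 8x bump from the diff, then the existing *2
print("gunicorn hello.wsgi:application"
      " --worker-class='egg:meinheld#gunicorn_worker'"
      " -b 0.0.0.0:8080 -w %d --log-level=critical" % workers)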
|
import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
# because pooling doesn't work with meinheld, it's necessary to create a ton of gunicorn threads (think apache pre-fork)
# to allow the OS to switch processes when waiting for socket I/O.
args.max_threads *= 8
# and go from there until the database server runs out of memory for new threads (connections)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
|
<commit_before>import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
<commit_msg>Use more gunicorn threads when a pooling database connector isn't available.
When using postgres with meinheld, the best you can do so far (as far as I know) is to up the number of threads.<commit_after>
|
import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
# because pooling doesn't work with meinheld, it's necessary to create a ton of gunicorn threads (think apache pre-fork)
# to allow the OS to switch processes when waiting for socket I/O.
args.max_threads *= 8
# and go from there until the database server runs out of memory for new threads (connections)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
|
import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
Use more gunicorn threads when a pooling database connector isn't available.
When using postgres with meinheld, the best you can do so far (as far as I know) is to up the number of threads.import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
# because pooling doesn't work with meinheld, it's necessary to create a ton of gunicorn threads (think apache pre-fork)
# to allow the OS to switch processes when waiting for socket I/O.
args.max_threads *= 8
# and go from there until the database server runs out of memory for new threads (connections)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
|
<commit_before>import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
<commit_msg>Use more gunicorn threads when a pooling database connector isn't available.
When using postgres with meinheld, the best you can do so far (as far as I know) is to up the number of threads.<commit_after>import subprocess
import sys
import setup_util
import os
from os.path import expanduser
home = expanduser("~")
def start(args):
setup_util.replace_text("django/hello/hello/settings.py", "HOST': '.*'", "HOST': '" + args.database_host + "'")
setup_util.replace_text("django/hello/hello/settings.py", "\/home\/ubuntu", home)
# because pooling doesn't work with meinheld, it's necessary to create a ton of gunicorn threads (think apache pre-fork)
# to allow the OS to switch processes when waiting for socket I/O.
args.max_threads *= 8
# and go from there until the database server runs out of memory for new threads (connections)
subprocess.Popen("gunicorn hello.wsgi:application --worker-class=\"egg:meinheld#gunicorn_worker\" -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="django/hello")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
|
53fbe6bed16fb4cddb0ffb533b8d1632ed209c5c
|
corehq/motech/fhir/tests/test_smart_metadata_endpoints.py
|
corehq/motech/fhir/tests/test_smart_metadata_endpoints.py
|
import json
from django.test import TestCase
from django.test.client import Client
from django.urls import reverse
from corehq.motech.fhir.models import build_capability_statement
from corehq.util.test_utils import flag_enabled
from corehq.util.view_utils import absolute_reverse
class TestConfigurationView(TestCase):
def test_configuration_view(self):
with flag_enabled('FHIR_INTEGRATION'):
response = Client().get(
reverse("smart_configuration_view", kwargs={
'domain': "test",
"fhir_version_name": "R4"
})
)
json_content = json.loads(response.content.decode('utf-8'))
self.assertEqual(json_content['authorization_endpoint'], absolute_reverse("oauth2_provider:authorize"))
self.assertEqual(json_content['token_endpoint'], absolute_reverse("oauth2_provider:token"))
class TestCapabilityStatement(TestCase):
def test_capability_statement(self):
statement = build_capability_statement("test_domain").to_json()
expected_statement = {
"date":
"2021-03-23",
"fhirVersion":
"4.0.1",
"kind":
"instance",
"status":
"active",
"format": ["json"],
"rest": [{
"mode": "server",
"security": {
"service": [{
"coding": [{
"code": "SMART-on-FHIR",
"system": "http://hl7.org/fhir/restful-security-service"
}]
}],
"extension": [{
"extension": [{
"valueUri": absolute_reverse("oauth2_provider:token"),
"url": "token"
}, {
"valueUri": absolute_reverse("oauth2_provider:authorize"),
"url": "authorize"
}],
"url": "http://fhir-registry.smarthealthit.org/StructureDefinition/oauth-uris"
}]
}
}]
}
self.assertDictEqual(statement, expected_statement)
|
Add smart metadata endpoint tests
|
Add smart metadata endpoint tests
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add smart metadata endpoint tests
|
import json
from django.test import TestCase
from django.test.client import Client
from django.urls import reverse
from corehq.motech.fhir.models import build_capability_statement
from corehq.util.test_utils import flag_enabled
from corehq.util.view_utils import absolute_reverse
class TestConfigurationView(TestCase):
def test_configuration_view(self):
with flag_enabled('FHIR_INTEGRATION'):
response = Client().get(
reverse("smart_configuration_view", kwargs={
'domain': "test",
"fhir_version_name": "R4"
})
)
json_content = json.loads(response.content.decode('utf-8'))
self.assertEqual(json_content['authorization_endpoint'], absolute_reverse("oauth2_provider:authorize"))
self.assertEqual(json_content['token_endpoint'], absolute_reverse("oauth2_provider:token"))
class TestCapabilityStatement(TestCase):
def test_capability_statement(self):
statement = build_capability_statement("test_domain").to_json()
expected_statement = {
"date":
"2021-03-23",
"fhirVersion":
"4.0.1",
"kind":
"instance",
"status":
"active",
"format": ["json"],
"rest": [{
"mode": "server",
"security": {
"service": [{
"coding": [{
"code": "SMART-on-FHIR",
"system": "http://hl7.org/fhir/restful-security-service"
}]
}],
"extension": [{
"extension": [{
"valueUri": absolute_reverse("oauth2_provider:token"),
"url": "token"
}, {
"valueUri": absolute_reverse("oauth2_provider:authorize"),
"url": "authorize"
}],
"url": "http://fhir-registry.smarthealthit.org/StructureDefinition/oauth-uris"
}]
}
}]
}
self.assertDictEqual(statement, expected_statement)
|
<commit_before><commit_msg>Add smart metadata endpoint tests<commit_after>
|
import json
from django.test import TestCase
from django.test.client import Client
from django.urls import reverse
from corehq.motech.fhir.models import build_capability_statement
from corehq.util.test_utils import flag_enabled
from corehq.util.view_utils import absolute_reverse
class TestConfigurationView(TestCase):
def test_configuration_view(self):
with flag_enabled('FHIR_INTEGRATION'):
response = Client().get(
reverse("smart_configuration_view", kwargs={
'domain': "test",
"fhir_version_name": "R4"
})
)
json_content = json.loads(response.content.decode('utf-8'))
self.assertEqual(json_content['authorization_endpoint'], absolute_reverse("oauth2_provider:authorize"))
self.assertEqual(json_content['token_endpoint'], absolute_reverse("oauth2_provider:token"))
class TestCapabilityStatement(TestCase):
def test_capability_statement(self):
statement = build_capability_statement("test_domain").to_json()
expected_statement = {
"date":
"2021-03-23",
"fhirVersion":
"4.0.1",
"kind":
"instance",
"status":
"active",
"format": ["json"],
"rest": [{
"mode": "server",
"security": {
"service": [{
"coding": [{
"code": "SMART-on-FHIR",
"system": "http://hl7.org/fhir/restful-security-service"
}]
}],
"extension": [{
"extension": [{
"valueUri": absolute_reverse("oauth2_provider:token"),
"url": "token"
}, {
"valueUri": absolute_reverse("oauth2_provider:authorize"),
"url": "authorize"
}],
"url": "http://fhir-registry.smarthealthit.org/StructureDefinition/oauth-uris"
}]
}
}]
}
self.assertDictEqual(statement, expected_statement)
|
Add smart metadata endpoint testsimport json
from django.test import TestCase
from django.test.client import Client
from django.urls import reverse
from corehq.motech.fhir.models import build_capability_statement
from corehq.util.test_utils import flag_enabled
from corehq.util.view_utils import absolute_reverse
class TestConfigurationView(TestCase):
def test_configuration_view(self):
with flag_enabled('FHIR_INTEGRATION'):
response = Client().get(
reverse("smart_configuration_view", kwargs={
'domain': "test",
"fhir_version_name": "R4"
})
)
json_content = json.loads(response.content.decode('utf-8'))
self.assertEqual(json_content['authorization_endpoint'], absolute_reverse("oauth2_provider:authorize"))
self.assertEqual(json_content['token_endpoint'], absolute_reverse("oauth2_provider:token"))
class TestCapabilityStatement(TestCase):
def test_capability_statement(self):
statement = build_capability_statement("test_domain").to_json()
expected_statement = {
"date":
"2021-03-23",
"fhirVersion":
"4.0.1",
"kind":
"instance",
"status":
"active",
"format": ["json"],
"rest": [{
"mode": "server",
"security": {
"service": [{
"coding": [{
"code": "SMART-on-FHIR",
"system": "http://hl7.org/fhir/restful-security-service"
}]
}],
"extension": [{
"extension": [{
"valueUri": absolute_reverse("oauth2_provider:token"),
"url": "token"
}, {
"valueUri": absolute_reverse("oauth2_provider:authorize"),
"url": "authorize"
}],
"url": "http://fhir-registry.smarthealthit.org/StructureDefinition/oauth-uris"
}]
}
}]
}
self.assertDictEqual(statement, expected_statement)
|
<commit_before><commit_msg>Add smart metadata endpoint tests<commit_after>import json
from django.test import TestCase
from django.test.client import Client
from django.urls import reverse
from corehq.motech.fhir.models import build_capability_statement
from corehq.util.test_utils import flag_enabled
from corehq.util.view_utils import absolute_reverse
class TestConfigurationView(TestCase):
def test_configuration_view(self):
with flag_enabled('FHIR_INTEGRATION'):
response = Client().get(
reverse("smart_configuration_view", kwargs={
'domain': "test",
"fhir_version_name": "R4"
})
)
json_content = json.loads(response.content.decode('utf-8'))
self.assertEqual(json_content['authorization_endpoint'], absolute_reverse("oauth2_provider:authorize"))
self.assertEqual(json_content['token_endpoint'], absolute_reverse("oauth2_provider:token"))
class TestCapabilityStatement(TestCase):
def test_capability_statement(self):
statement = build_capability_statement("test_domain").to_json()
expected_statement = {
"date":
"2021-03-23",
"fhirVersion":
"4.0.1",
"kind":
"instance",
"status":
"active",
"format": ["json"],
"rest": [{
"mode": "server",
"security": {
"service": [{
"coding": [{
"code": "SMART-on-FHIR",
"system": "http://hl7.org/fhir/restful-security-service"
}]
}],
"extension": [{
"extension": [{
"valueUri": absolute_reverse("oauth2_provider:token"),
"url": "token"
}, {
"valueUri": absolute_reverse("oauth2_provider:authorize"),
"url": "authorize"
}],
"url": "http://fhir-registry.smarthealthit.org/StructureDefinition/oauth-uris"
}]
}
}]
}
self.assertDictEqual(statement, expected_statement)
|
|
93eb8e085854996e28982fad67915810328f8dc8
|
examples/adjacency_list.py
|
examples/adjacency_list.py
|
from collections import deque
from peewee import *
db = SqliteDatabase(':memory:')
class TreeNode(Model):
parent = ForeignKeyField('self', backref='children', null=True)
name = TextField()
class Meta:
database = db
def __str__(self):
return 'name=%s' % self.name
def dump(self, _indent=0):
return (' ' * _indent + repr(self) + '\n' +
''.join(c.dump(_indent + 1) for c in self.children))
if __name__ == '__main__':
db.create_tables([TreeNode])
tree = (
'root', (
('node-1', (
('sub-1-1', ()),
('sub-1-2', ()))),
('node-2', (
('sub-2-1', (
('sub-sub-2-1-1', ()),
('sub-sub-2-1-2', ()))),
('sub-2-2', (
('sub-sub-2-2-1', ()),
('sub-sub-2-2-2', ()))))),
('node-3', ()),
('node-4', (
('sub-4-1', ()),
('sub-4-2', ()),
('sub-4-3', ()),
('sub-4-4', ())))))
with db.atomic():
stack = deque([(None, tree)])
while stack:
parent, t = stack.pop()
name, children = t
node = TreeNode.create(name=name, parent=parent)
for childdef in children:
stack.appendleft((node, childdef))
root = TreeNode.get(name='root')
print(root.dump())
|
Add adjacency list example. Compare to SQA's.
|
Add adjacency list example. Compare to SQA's.
[skip ci]
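For readers making the comparison with SQLAlchemy's version of this example: an adjacency list stores only parent pointers, so reading a whole subtree back means repeated child lookups (or a recursive CTE). Below is a minimal breadth-first walk against the TreeNode model defined in this example -- the descendants helper is a hypothetical addition, and it issues one SELECT per visited node:

from collections import deque

def descendants(node):
    queue, found = deque([node]), []
    while queue:
        children = list(queue.popleft().children)  # one SELECT per visited node
        found.extend(children)
        queue.extend(children)
    return found

# e.g. [n.name for n in descendants(TreeNode.get(name='root'))]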
|
Python
|
mit
|
coleifer/peewee,coleifer/peewee,coleifer/peewee
|
Add adjacency list example. Compare to SQA's.
[skip ci]
|
from collections import deque
from peewee import *
db = SqliteDatabase(':memory:')
class TreeNode(Model):
parent = ForeignKeyField('self', backref='children', null=True)
name = TextField()
class Meta:
database = db
def __str__(self):
return 'name=%s' % self.name
def dump(self, _indent=0):
return (' ' * _indent + repr(self) + '\n' +
''.join(c.dump(_indent + 1) for c in self.children))
if __name__ == '__main__':
db.create_tables([TreeNode])
tree = (
'root', (
('node-1', (
('sub-1-1', ()),
('sub-1-2', ()))),
('node-2', (
('sub-2-1', (
('sub-sub-2-1-1', ()),
('sub-sub-2-1-2', ()))),
('sub-2-2', (
('sub-sub-2-2-1', ()),
('sub-sub-2-2-2', ()))))),
('node-3', ()),
('node-4', (
('sub-4-1', ()),
('sub-4-2', ()),
('sub-4-3', ()),
('sub-4-4', ())))))
with db.atomic():
stack = deque([(None, tree)])
while stack:
parent, t = stack.pop()
name, children = t
node = TreeNode.create(name=name, parent=parent)
for childdef in children:
stack.appendleft((node, childdef))
root = TreeNode.get(name='root')
print(root.dump())
|
<commit_before><commit_msg>Add adjacency list example. Compare to SQA's.
[skip ci]<commit_after>
|
from collections import deque
from peewee import *
db = SqliteDatabase(':memory:')
class TreeNode(Model):
parent = ForeignKeyField('self', backref='children', null=True)
name = TextField()
class Meta:
database = db
def __str__(self):
return 'name=%s' % self.name
def dump(self, _indent=0):
return (' ' * _indent + repr(self) + '\n' +
''.join(c.dump(_indent + 1) for c in self.children))
if __name__ == '__main__':
db.create_tables([TreeNode])
tree = (
'root', (
('node-1', (
('sub-1-1', ()),
('sub-1-2', ()))),
('node-2', (
('sub-2-1', (
('sub-sub-2-1-1', ()),
('sub-sub-2-1-2', ()))),
('sub-2-2', (
('sub-sub-2-2-1', ()),
('sub-sub-2-2-2', ()))))),
('node-3', ()),
('node-4', (
('sub-4-1', ()),
('sub-4-2', ()),
('sub-4-3', ()),
('sub-4-4', ())))))
with db.atomic():
stack = deque([(None, tree)])
while stack:
parent, t = stack.pop()
name, children = t
node = TreeNode.create(name=name, parent=parent)
for childdef in children:
stack.appendleft((node, childdef))
root = TreeNode.get(name='root')
print(root.dump())
|
Add adjacency list example. Compare to SQA's.
[skip ci]from collections import deque
from peewee import *
db = SqliteDatabase(':memory:')
class TreeNode(Model):
parent = ForeignKeyField('self', backref='children', null=True)
name = TextField()
class Meta:
database = db
def __str__(self):
return 'name=%s' % self.name
def dump(self, _indent=0):
return (' ' * _indent + repr(self) + '\n' +
''.join(c.dump(_indent + 1) for c in self.children))
if __name__ == '__main__':
db.create_tables([TreeNode])
tree = (
'root', (
('node-1', (
('sub-1-1', ()),
('sub-1-2', ()))),
('node-2', (
('sub-2-1', (
('sub-sub-2-1-1', ()),
('sub-sub-2-1-2', ()))),
('sub-2-2', (
('sub-sub-2-2-1', ()),
('sub-sub-2-2-2', ()))))),
('node-3', ()),
('node-4', (
('sub-4-1', ()),
('sub-4-2', ()),
('sub-4-3', ()),
('sub-4-4', ())))))
with db.atomic():
stack = deque([(None, tree)])
while stack:
parent, t = stack.pop()
name, children = t
node = TreeNode.create(name=name, parent=parent)
for childdef in children:
stack.appendleft((node, childdef))
root = TreeNode.get(name='root')
print(root.dump())
|
<commit_before><commit_msg>Add adjacency list example. Compare to SQA's.
[skip ci]<commit_after>from collections import deque
from peewee import *
db = SqliteDatabase(':memory:')
class TreeNode(Model):
parent = ForeignKeyField('self', backref='children', null=True)
name = TextField()
class Meta:
database = db
def __str__(self):
return 'name=%s' % self.name
def dump(self, _indent=0):
return (' ' * _indent + repr(self) + '\n' +
''.join(c.dump(_indent + 1) for c in self.children))
if __name__ == '__main__':
db.create_tables([TreeNode])
tree = (
'root', (
('node-1', (
('sub-1-1', ()),
('sub-1-2', ()))),
('node-2', (
('sub-2-1', (
('sub-sub-2-1-1', ()),
('sub-sub-2-1-2', ()))),
('sub-2-2', (
('sub-sub-2-2-1', ()),
('sub-sub-2-2-2', ()))))),
('node-3', ()),
('node-4', (
('sub-4-1', ()),
('sub-4-2', ()),
('sub-4-3', ()),
('sub-4-4', ())))))
with db.atomic():
stack = deque([(None, tree)])
while stack:
parent, t = stack.pop()
name, children = t
node = TreeNode.create(name=name, parent=parent)
for childdef in children:
stack.appendleft((node, childdef))
root = TreeNode.get(name='root')
print(root.dump())
|
|
bd928bc44bc5a357fea6d2184549f33ea244f744
|
manoseimas/mps_v2/migrations/0007_auto_20150605_0930.py
|
manoseimas/mps_v2/migrations/0007_auto_20150605_0930.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mps_v2', '0006_auto_20150604_1500'),
]
operations = [
migrations.AlterField(
model_name='parliamentmember',
name='groups',
field=models.ManyToManyField(related_name='members', through='mps_v2.GroupMembership', to='mps_v2.Group'),
),
]
|
Add related name to MP <-> group relationship.
|
Add related name to MP <-> group relationship.
|
Python
|
agpl-3.0
|
ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt
|
Add related name to MP <-> group relationship.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mps_v2', '0006_auto_20150604_1500'),
]
operations = [
migrations.AlterField(
model_name='parliamentmember',
name='groups',
field=models.ManyToManyField(related_name='members', through='mps_v2.GroupMembership', to='mps_v2.Group'),
),
]
|
<commit_before><commit_msg>Add related name to MP <-> group relationship.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mps_v2', '0006_auto_20150604_1500'),
]
operations = [
migrations.AlterField(
model_name='parliamentmember',
name='groups',
field=models.ManyToManyField(related_name='members', through='mps_v2.GroupMembership', to='mps_v2.Group'),
),
]
|
Add related name to MP <-> group relationship.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mps_v2', '0006_auto_20150604_1500'),
]
operations = [
migrations.AlterField(
model_name='parliamentmember',
name='groups',
field=models.ManyToManyField(related_name='members', through='mps_v2.GroupMembership', to='mps_v2.Group'),
),
]
|
<commit_before><commit_msg>Add related name to MP <-> group relationship.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mps_v2', '0006_auto_20150604_1500'),
]
operations = [
migrations.AlterField(
model_name='parliamentmember',
name='groups',
field=models.ManyToManyField(related_name='members', through='mps_v2.GroupMembership', to='mps_v2.Group'),
),
]
|
|
45e714e69495f7bc3d93ce88e7a4e0a7bba4b39a
|
migrations/versions/0233_updated_first_class_dates.py
|
migrations/versions/0233_updated_first_class_dates.py
|
"""empty message
Revision ID: 0233_updated_first_class_dates
Revises: 0230_noti_postage_constraint_3
"""
revision = '0233_updated_first_class_dates'
down_revision = '0230_noti_postage_constraint_3'
from datetime import datetime
from alembic import op
from sqlalchemy.sql import text
START_DATE = datetime(2018, 8, 31, 23, 0)
def upgrade():
conn = op.get_bind()
conn.execute(text(
"""UPDATE letter_rates SET start_date = :start_date WHERE post_class = 'first'"""
), start_date=START_DATE)
def downgrade():
'''
This data migration should not be downgraded. Downgrading may cause billing errors
    and the /monthly-usage endpoint to stop working.
'''
|
Update first class letter rate start dates
|
Update first class letter rate start dates
We want to bring the start dates for first class letter rates forward by
a month so that we don't see billing errors when sending first class letters now.
(The feature will still go live at the planned time - this is to let us test things
beforehand.)
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Update first class letter rate start dates
We want to bring the start dates for first class letter rates forward by
a month so that we don't see billing errors when sending first class letters now.
(The feature will still go live at the planned time - this is to let us test things
beforehand.)
|
"""empty message
Revision ID: 0233_updated_first_class_dates
Revises: 0230_noti_postage_constraint_3
"""
revision = '0233_updated_first_class_dates'
down_revision = '0230_noti_postage_constraint_3'
from datetime import datetime
from alembic import op
from sqlalchemy.sql import text
START_DATE = datetime(2018, 8, 31, 23, 0)
def upgrade():
conn = op.get_bind()
conn.execute(text(
"""UPDATE letter_rates SET start_date = :start_date WHERE post_class = 'first'"""
), start_date=START_DATE)
def downgrade():
'''
This data migration should not be downgraded. Downgrading may cause billing errors
    and the /monthly-usage endpoint to stop working.
'''
|
<commit_before><commit_msg>Update first class letter rate start dates
We want to bring the start dates for first class letter rates forward by
a month so that we don't see billing errors when sending first class letters now.
(The feature will still go live at the planned time - this is to let us test things
beforehand.)<commit_after>
|
"""empty message
Revision ID: 0233_updated_first_class_dates
Revises: 0230_noti_postage_constraint_3
"""
revision = '0233_updated_first_class_dates'
down_revision = '0230_noti_postage_constraint_3'
from datetime import datetime
from alembic import op
from sqlalchemy.sql import text
START_DATE = datetime(2018, 8, 31, 23, 0)
def upgrade():
conn = op.get_bind()
conn.execute(text(
"""UPDATE letter_rates SET start_date = :start_date WHERE post_class = 'first'"""
), start_date=START_DATE)
def downgrade():
'''
This data migration should not be downgraded. Downgrading may cause billing errors
    and the /monthly-usage endpoint to stop working.
'''
|
Update first class letter rate start dates
We want to bring the start dates for first class letter rates forward by
a month so that we don't see billing errors when sending first class letters now.
(The feature will still go live at the planned time - this is to let us test things
beforehand.)"""empty message
Revision ID: 0233_updated_first_class_dates
Revises: 0230_noti_postage_constraint_3
"""
revision = '0233_updated_first_class_dates'
down_revision = '0230_noti_postage_constraint_3'
from datetime import datetime
from alembic import op
from sqlalchemy.sql import text
START_DATE = datetime(2018, 8, 31, 23, 0)
def upgrade():
conn = op.get_bind()
conn.execute(text(
"""UPDATE letter_rates SET start_date = :start_date WHERE post_class = 'first'"""
), start_date=START_DATE)
def downgrade():
'''
This data migration should not be downgraded. Downgrading may cause billing errors
    and the /monthly-usage endpoint to stop working.
'''
|
<commit_before><commit_msg>Update first class letter rate start dates
We want to bring the start dates for first class letter rates forward by
a month so that we don't see billing errors when sending first class letters now.
(The feature will still go live at the planned time - this is to let us test things
beforehand.)<commit_after>"""empty message
Revision ID: 0233_updated_first_class_dates
Revises: 0230_noti_postage_constraint_3
"""
revision = '0233_updated_first_class_dates'
down_revision = '0230_noti_postage_constraint_3'
from datetime import datetime
from alembic import op
from sqlalchemy.sql import text
START_DATE = datetime(2018, 8, 31, 23, 0)
def upgrade():
conn = op.get_bind()
conn.execute(text(
"""UPDATE letter_rates SET start_date = :start_date WHERE post_class = 'first'"""
), start_date=START_DATE)
def downgrade():
'''
This data migration should not be downgraded. Downgrading may cause billing errors
    and the /monthly-usage endpoint to stop working.
'''
|
|
734333e1bdcbe6077b38e471000746d4566a39cb
|
temba/msgs/migrations/0087_populate_broadcast_send_all.py
|
temba/msgs/migrations/0087_populate_broadcast_send_all.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
Add data migrations to populate broadcast send all field
|
Add data migrations to populate broadcast send all field
|
Python
|
agpl-3.0
|
pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro
|
Add data migrations to populate broadcast send all field
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
<commit_before><commit_msg>Add data migrations to populate broadcast send all field<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
Add data migrations to populate broadcast send all field# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
<commit_before><commit_msg>Add data migrations to populate broadcast send all field<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
|
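temba.utils.chunk_list is imported above but not shown; the migration only needs it to yield successive fixed-size slices of the id list, and a functionally equivalent sketch is:

from itertools import islice

def chunk_list(iterable, size):
    it = iter(iterable)
    while True:
        chunk = list(islice(it, size))
        if not chunk:
            return
        yield chunk

# e.g. list(chunk_list(range(5), 2)) -> [[0, 1], [2, 3], [4]]

Batching the UPDATE in 1000-id chunks keeps each statement's IN clause bounded, which matters on tables with millions of broadcasts.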
64a59a2c9014e57039af1f8362bbf3fcba19aa42
|
teami18n/teami18n/management/commands/import_country_data.py
|
teami18n/teami18n/management/commands/import_country_data.py
|
from datetime import date
from django.core.management import BaseCommand
import wbpy
from teami18n.models import Country
class Command(BaseCommand):
help = 'Import Country Data'
def handle(self, *args, **options):
def get_population(country_code):
current_date = date.today()
last_year = str(current_date.year - 1)
try:
population_dataset = api.get_dataset(
"SP.POP.TOTL", [country_code], date=last_year)
population = population_dataset.as_dict()[country_code][last_year]
return population
except ValueError:
pass
api = wbpy.IndicatorAPI()
for country in Country.objects.all():
population = get_population(country.code)
if population:
country.population = int(population)
country.save()
|
Add command to import countries population
|
Add command to import countries population
|
Python
|
mit
|
team-i18n/hackaway,team-i18n/hackaway,team-i18n/hackaway
|
Add command to import countries population
|
from datetime import date
from django.core.management import BaseCommand
import wbpy
from teami18n.models import Country
class Command(BaseCommand):
help = 'Import Country Data'
def handle(self, *args, **options):
def get_population(country_code):
current_date = date.today()
last_year = str(current_date.year - 1)
try:
population_dataset = api.get_dataset(
"SP.POP.TOTL", [country_code], date=last_year)
population = population_dataset.as_dict()[country_code][last_year]
return population
except ValueError:
pass
api = wbpy.IndicatorAPI()
for country in Country.objects.all():
population = get_population(country.code)
if population:
country.population = int(population)
country.save()
|
<commit_before><commit_msg>Add command to import countries population<commit_after>
|
from datetime import date
from django.core.management import BaseCommand
import wbpy
from teami18n.models import Country
class Command(BaseCommand):
help = 'Import Country Data'
def handle(self, *args, **options):
def get_population(country_code):
current_date = date.today()
last_year = str(current_date.year - 1)
try:
population_dataset = api.get_dataset(
"SP.POP.TOTL", [country_code], date=last_year)
population = population_dataset.as_dict()[country_code][last_year]
return population
except ValueError:
pass
api = wbpy.IndicatorAPI()
for country in Country.objects.all():
population = get_population(country.code)
if population:
country.population = int(population)
country.save()
|
Add command to import countries populationfrom datetime import date
from django.core.management import BaseCommand
import wbpy
from teami18n.models import Country
class Command(BaseCommand):
help = 'Import Country Data'
def handle(self, *args, **options):
def get_population(country_code):
current_date = date.today()
last_year = str(current_date.year - 1)
try:
population_dataset = api.get_dataset(
"SP.POP.TOTL", [country_code], date=last_year)
population = population_dataset.as_dict()[country_code][last_year]
return population
except ValueError:
pass
api = wbpy.IndicatorAPI()
for country in Country.objects.all():
population = get_population(country.code)
if population:
country.population = int(population)
country.save()
|
<commit_before><commit_msg>Add command to import countries population<commit_after>from datetime import date
from django.core.management import BaseCommand
import wbpy
from teami18n.models import Country
class Command(BaseCommand):
help = 'Import Country Data'
def handle(self, *args, **options):
def get_population(country_code):
current_date = date.today()
last_year = str(current_date.year - 1)
try:
population_dataset = api.get_dataset(
"SP.POP.TOTL", [country_code], date=last_year)
population = population_dataset.as_dict()[country_code][last_year]
return population
except ValueError:
pass
api = wbpy.IndicatorAPI()
for country in Country.objects.all():
population = get_population(country.code)
if population:
country.population = int(population)
country.save()
|
|
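A standalone version of the wbpy lookup the command above wraps, assuming network access and the same IndicatorAPI interface; note that the command's bare except ValueError silently skips countries the API has no figure for that year:

import wbpy

api = wbpy.IndicatorAPI()
# SP.POP.TOTL is the World Bank total-population indicator; "GB" is an
# example ISO alpha-2 code of the kind Country.code is assumed to hold.
dataset = api.get_dataset("SP.POP.TOTL", ["GB"], date="2016")
print(dataset.as_dict()["GB"]["2016"])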
e4612e9c7cb61906a2f32a6a227f9ecec3a73957
|
bin/escape_key.py
|
bin/escape_key.py
|
#!/usr/bin/env python
#
# Change caps lock to escape on built in keyboards
#
# https://github.com/tekezo/Seil/issues/68#issuecomment-229260237
# Must be run with sudo, SIP must be disabled, requires restart
#
from xml.etree import ElementTree
import argparse
import os
import subprocess
# Find this product and the vendor ID by checking "System Information" -> USB -> Apple Internal Keyboard / Trackpad
PRODUCT_ID = 610
# Find this by cross referencing the vendor and product ID (in base 10) with
# what's in this plist. If you can't find it here check either of these:
#
# /System/Library/Extensions/AppleHIDKeyboard.kext/Contents/Info.plist
# /System/Library/Extensions/AppleTopCase.kext/Contents/PlugIns/AppleTopCaseHIDEventDriver.kext/Contents/Info.plist
#
PLIST_PATH = "/System/Library/Extensions/AppleUSBTopCase.kext/Contents/PlugIns/AppleUSBTCKeyEventDriver.kext/Contents/Info.plist"
HEADER = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">"""
plist = ElementTree.parse(PLIST_PATH)
driverdef = plist.findall(".//*[key='ProductID']/[integer='%d']"
% PRODUCT_ID)[0]
ElementTree.SubElement(driverdef, 'key').text = 'KeyboardUsageMap'
map_dict = ElementTree.SubElement(driverdef, 'dict')
ElementTree.SubElement(map_dict, 'key').text = '0x00070039'
ElementTree.SubElement(map_dict, 'integer').text = '0x00070029'
os.rename(PLIST_PATH, PLIST_PATH + '.bak')
with open(PLIST_PATH, 'w') as f:
treestr = ElementTree.tostring(plist.getroot(),
encoding='UTF-8').replace(' />', '/>')
f.write("%s\n%s" % (HEADER, treestr))
subprocess.call("touch /System/Library/Extensions && sudo kextcache -update-volume /".split())
|
Add script for changing caps lock to escape
|
Add script for changing caps lock to escape
|
Python
|
mit
|
keith/dotfiles,keith/dotfiles,keith/dotfiles,keith/dotfiles,keith/dotfiles,keith/dotfiles
|
Add script for changing caps lock to escape
|
#!/usr/bin/env python
#
# Change caps lock to escape on built in keyboards
#
# https://github.com/tekezo/Seil/issues/68#issuecomment-229260237
# Must be run with sudo, SIP must be disabled, requires restart
#
from xml.etree import ElementTree
import argparse
import os
import subprocess
# Find this product and the vendor ID by checking "System Information" -> USB -> Apple Internal Keyboard / Trackpad
PRODUCT_ID = 610
# Find this by cross referencing the vendor and product ID (in base 10) with
# what's in this plist. If you can't find it here check either of these:
#
# /System/Library/Extensions/AppleHIDKeyboard.kext/Contents/Info.plist
# /System/Library/Extensions/AppleTopCase.kext/Contents/PlugIns/AppleTopCaseHIDEventDriver.kext/Contents/Info.plist
#
PLIST_PATH = "/System/Library/Extensions/AppleUSBTopCase.kext/Contents/PlugIns/AppleUSBTCKeyEventDriver.kext/Contents/Info.plist"
HEADER = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">"""
plist = ElementTree.parse(PLIST_PATH)
driverdef = plist.findall(".//*[key='ProductID']/[integer='%d']"
% PRODUCT_ID)[0]
ElementTree.SubElement(driverdef, 'key').text = 'KeyboardUsageMap'
map_dict = ElementTree.SubElement(driverdef, 'dict')
ElementTree.SubElement(map_dict, 'key').text = '0x00070039'
ElementTree.SubElement(map_dict, 'integer').text = '0x00070029'
os.rename(PLIST_PATH, PLIST_PATH + '.bak')
with open(PLIST_PATH, 'w') as f:
treestr = ElementTree.tostring(plist.getroot(),
encoding='UTF-8').replace(' />', '/>')
f.write("%s\n%s" % (HEADER, treestr))
subprocess.call("touch /System/Library/Extensions && sudo kextcache -update-volume /".split())
|
<commit_before><commit_msg>Add script for changing caps lock to escape<commit_after>
|
#!/usr/bin/env python
#
# Change caps lock to escape on built in keyboards
#
# https://github.com/tekezo/Seil/issues/68#issuecomment-229260237
# Must be run with sudo, SIP must be disabled, requires restart
#
from xml.etree import ElementTree
import argparse
import os
import subprocess
# Find this product and the vendor ID by checking "System Information" -> USB -> Apple Internal Keyboard / Trackpad
PRODUCT_ID = 610
# Find this by cross referencing the vendor and product ID (in base 10) with
# what's in this plist. If you can't find it here check either of these:
#
# /System/Library/Extensions/AppleHIDKeyboard.kext/Contents/Info.plist
# /System/Library/Extensions/AppleTopCase.kext/Contents/PlugIns/AppleTopCaseHIDEventDriver.kext/Contents/Info.plist
#
PLIST_PATH = "/System/Library/Extensions/AppleUSBTopCase.kext/Contents/PlugIns/AppleUSBTCKeyEventDriver.kext/Contents/Info.plist"
HEADER = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">"""
plist = ElementTree.parse(PLIST_PATH)
driverdef = plist.findall(".//*[key='ProductID']/[integer='%d']"
% PRODUCT_ID)[0]
ElementTree.SubElement(driverdef, 'key').text = 'KeyboardUsageMap'
map_dict = ElementTree.SubElement(driverdef, 'dict')
ElementTree.SubElement(map_dict, 'key').text = '0x00070039'
ElementTree.SubElement(map_dict, 'integer').text = '0x00070029'
os.rename(PLIST_PATH, PLIST_PATH + '.bak')
with open(PLIST_PATH, 'w') as f:
treestr = ElementTree.tostring(plist.getroot(),
encoding='UTF-8').replace(' />', '/>')
f.write("%s\n%s" % (HEADER, treestr))
subprocess.call("touch /System/Library/Extensions && sudo kextcache -update-volume /".split())
|
Add script for changing caps lock to escape#!/usr/bin/env python
#
# Change caps lock to escape on built in keyboards
#
# https://github.com/tekezo/Seil/issues/68#issuecomment-229260237
# Must be run with sudo, SIP must be disabled, requires restart
#
from xml.etree import ElementTree
import argparse
import os
import subprocess
# Find this product and the vendor ID by checking "System Information" -> USB -> Apple Internal Keyboard / Trackpad
PRODUCT_ID = 610
# Find this by cross referencing the vendor and product ID (in base 10) with
# what's in this plist. If you can't find it here check either of these:
#
# /System/Library/Extensions/AppleHIDKeyboard.kext/Contents/Info.plist
# /System/Library/Extensions/AppleTopCase.kext/Contents/PlugIns/AppleTopCaseHIDEventDriver.kext/Contents/Info.plist
#
PLIST_PATH = "/System/Library/Extensions/AppleUSBTopCase.kext/Contents/PlugIns/AppleUSBTCKeyEventDriver.kext/Contents/Info.plist"
HEADER = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">"""
plist = ElementTree.parse(PLIST_PATH)
driverdef = plist.findall(".//*[key='ProductID']/[integer='%d']"
% PRODUCT_ID)[0]
ElementTree.SubElement(driverdef, 'key').text = 'KeyboardUsageMap'
map_dict = ElementTree.SubElement(driverdef, 'dict')
ElementTree.SubElement(map_dict, 'key').text = '0x00070039'
ElementTree.SubElement(map_dict, 'integer').text = '0x00070029'
os.rename(PLIST_PATH, PLIST_PATH + '.bak')
with open(PLIST_PATH, 'w') as f:
treestr = ElementTree.tostring(plist.getroot(),
encoding='UTF-8').replace(' />', '/>')
f.write("%s\n%s" % (HEADER, treestr))
subprocess.call("touch /System/Library/Extensions && sudo kextcache -update-volume /".split())
|
<commit_before><commit_msg>Add script for changing caps lock to escape<commit_after>#!/usr/bin/env python
#
# Change caps lock to escape on built in keyboards
#
# https://github.com/tekezo/Seil/issues/68#issuecomment-229260237
# Must be run with sudo, SIP must be disabled, requires restart
#
from xml.etree import ElementTree
import argparse
import os
import subprocess
# Find this product and the vendor ID by checking "System Information" -> USB -> Apple Internal Keyboard / Trackpad
PRODUCT_ID = 610
# Find this by cross referencing the vendor and product ID (in base 10) with
# what's in this plist. If you can't find it here check either of these:
#
# /System/Library/Extensions/AppleHIDKeyboard.kext/Contents/Info.plist
# /System/Library/Extensions/AppleTopCase.kext/Contents/PlugIns/AppleTopCaseHIDEventDriver.kext/Contents/Info.plist
#
PLIST_PATH = "/System/Library/Extensions/AppleUSBTopCase.kext/Contents/PlugIns/AppleUSBTCKeyEventDriver.kext/Contents/Info.plist"
HEADER = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">"""
plist = ElementTree.parse(PLIST_PATH)
driverdef = plist.findall(".//*[key='ProductID']/[integer='%d']"
% PRODUCT_ID)[0]
ElementTree.SubElement(driverdef, 'key').text = 'KeyboardUsageMap'
map_dict = ElementTree.SubElement(driverdef, 'dict')
ElementTree.SubElement(map_dict, 'key').text = '0x00070039'
ElementTree.SubElement(map_dict, 'integer').text = '0x00070029'
os.rename(PLIST_PATH, PLIST_PATH + '.bak')
with open(PLIST_PATH, 'w') as f:
treestr = ElementTree.tostring(plist.getroot(),
encoding='UTF-8').replace(' />', '/>')
f.write("%s\n%s" % (HEADER, treestr))
subprocess.call("touch /System/Library/Extensions && sudo kextcache -update-volume /".split())
|
|
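The findall() query above leans on the plist convention that a <dict> stores entries as alternating <key>/<value> siblings, so matching on both children selects the enclosing driver definition. A self-contained demonstration of the same idea, written as two chained predicates, which stock ElementTree accepts:

from xml.etree import ElementTree

SAMPLE = """<plist><array>
  <dict><key>ProductID</key><integer>610</integer></dict>
  <dict><key>ProductID</key><integer>999</integer></dict>
</array></plist>"""

root = ElementTree.fromstring(SAMPLE)
# A dict whose key child reads "ProductID" and whose integer child
# reads "610"; the enclosing <dict> is returned, not the <key>.
matches = root.findall(".//dict[key='ProductID'][integer='610']")
print(len(matches))  # 1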
4284afb497dae4923a856785d17b4b6d145fc48a
|
virtualfish/test/test_addpath.py
|
virtualfish/test/test_addpath.py
|
import sys
def test_addpath(vf):
venv = f"{vf.homedir}/.virtualenvs/test"
vf.run("vf new test")
vf.run("vf activate test")
vf.run(f"mkdir {venv}/testpath")
vf.run(f"vf addpath {venv}/testpath")
out, _ = vf.run(f"{venv}/bin/python -c 'import sys; print(sys.path)'")
assert ".virtualenvs/test/testpath" in str(out)
|
Add test for `vf addpath <path>`
|
Add test for `vf addpath <path>`
|
Python
|
mit
|
adambrenecki/virtualfish,adambrenecki/virtualfish
|
Add test for `vf addpath <path>`
|
import sys
def test_addpath(vf):
venv = f"{vf.homedir}/.virtualenvs/test"
vf.run("vf new test")
vf.run("vf activate test")
vf.run(f"mkdir {venv}/testpath")
vf.run(f"vf addpath {venv}/testpath")
out, _ = vf.run(f"{venv}/bin/python -c 'import sys; print(sys.path)'")
assert ".virtualenvs/test/testpath" in str(out)
|
<commit_before><commit_msg>Add test for `vf addpath <path>`<commit_after>
|
import sys
def test_addpath(vf):
venv = f"{vf.homedir}/.virtualenvs/test"
vf.run("vf new test")
vf.run("vf activate test")
vf.run(f"mkdir {venv}/testpath")
vf.run(f"vf addpath {venv}/testpath")
out, _ = vf.run(f"{venv}/bin/python -c 'import sys; print(sys.path)'")
assert ".virtualenvs/test/testpath" in str(out)
|
Add test for `vf addpath <path>`import sys
def test_addpath(vf):
venv = f"{vf.homedir}/.virtualenvs/test"
vf.run("vf new test")
vf.run("vf activate test")
vf.run(f"mkdir {venv}/testpath")
vf.run(f"vf addpath {venv}/testpath")
out, _ = vf.run(f"{venv}/bin/python -c 'import sys; print(sys.path)'")
assert ".virtualenvs/test/testpath" in str(out)
|
<commit_before><commit_msg>Add test for `vf addpath <path>`<commit_after>import sys
def test_addpath(vf):
venv = f"{vf.homedir}/.virtualenvs/test"
vf.run("vf new test")
vf.run("vf activate test")
vf.run(f"mkdir {venv}/testpath")
vf.run(f"vf addpath {venv}/testpath")
out, _ = vf.run(f"{venv}/bin/python -c 'import sys; print(sys.path)'")
assert ".virtualenvs/test/testpath" in str(out)
|
|
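The vf fixture comes from the suite's conftest and is not shown; the test only relies on a homedir attribute and a run() that returns an (out, err) pair. A rough sketch of that interface follows, though it is simplified: activation state has to persist between run() calls, so the real fixture presumably drives one long-lived fish session rather than a fresh process per command.

import subprocess

class VF:
    def __init__(self, homedir):
        self.homedir = homedir

    def run(self, cmd):
        # One fish process per command -- enough to show the shape of
        # the interface, not the state-carrying behaviour.
        proc = subprocess.run(["fish", "-c", cmd],
                              capture_output=True, text=True, check=True)
        return proc.stdout, proc.stderr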
bcb03670ac1a561899f98c22635092dc5be11770
|
tests/test_permissions.py
|
tests/test_permissions.py
|
import nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
private_test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries-private/'))
def test_nx():
nx_amd64 = angr.Project(test_location + "/x86_64/memmove")
es = nx_amd64.factory.entry_state()
# .text should be PROT_READ|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(nx_amd64.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 3)
def test_no_nx():
no_nx_i386 = angr.Project(private_test_location + "/cgc_scored_event_1/cgc/0b32aa01_01")
es = no_nx_i386.factory.entry_state()
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(no_nx_i386.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 7)
if __name__ == "__main__":
test_nx()
test_no_nx()
|
Add simple testcase for page permissions
|
Add simple testcase for page permissions
|
Python
|
bsd-2-clause
|
iamahuman/angr,tyb0807/angr,axt/angr,f-prettyland/angr,angr/angr,tyb0807/angr,angr/angr,axt/angr,schieb/angr,f-prettyland/angr,chubbymaggie/angr,haylesr/angr,schieb/angr,chubbymaggie/angr,iamahuman/angr,angr/angr,iamahuman/angr,tyb0807/angr,axt/angr,schieb/angr,chubbymaggie/angr,f-prettyland/angr,haylesr/angr
|
Add simple testcase for page permissions
|
import nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
private_test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries-private/'))
def test_nx():
nx_amd64 = angr.Project(test_location + "/x86_64/memmove")
es = nx_amd64.factory.entry_state()
# .text should be PROT_READ|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(nx_amd64.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 3)
def test_no_nx():
no_nx_i386 = angr.Project(private_test_location + "/cgc_scored_event_1/cgc/0b32aa01_01")
es = no_nx_i386.factory.entry_state()
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(no_nx_i386.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 7)
if __name__ == "__main__":
test_nx()
test_no_nx()
|
<commit_before><commit_msg>Add simple testcase for page permissions<commit_after>
|
import nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
private_test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries-private/'))
def test_nx():
nx_amd64 = angr.Project(test_location + "/x86_64/memmove")
es = nx_amd64.factory.entry_state()
# .text should be PROT_READ|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(nx_amd64.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 3)
def test_no_nx():
no_nx_i386 = angr.Project(private_test_location + "/cgc_scored_event_1/cgc/0b32aa01_01")
es = no_nx_i386.factory.entry_state()
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(no_nx_i386.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 7)
if __name__ == "__main__":
test_nx()
test_no_nx()
|
Add simple testcase for page permissionsimport nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
private_test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries-private/'))
def test_nx():
nx_amd64 = angr.Project(test_location + "/x86_64/memmove")
es = nx_amd64.factory.entry_state()
# .text should be PROT_READ|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(nx_amd64.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 3)
def test_no_nx():
no_nx_i386 = angr.Project(private_test_location + "/cgc_scored_event_1/cgc/0b32aa01_01")
es = no_nx_i386.factory.entry_state()
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(no_nx_i386.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 7)
if __name__ == "__main__":
test_nx()
test_no_nx()
|
<commit_before><commit_msg>Add simple testcase for page permissions<commit_after>import nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
private_test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries-private/'))
def test_nx():
nx_amd64 = angr.Project(test_location + "/x86_64/memmove")
es = nx_amd64.factory.entry_state()
# .text should be PROT_READ|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(nx_amd64.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 3)
def test_no_nx():
no_nx_i386 = angr.Project(private_test_location + "/cgc_scored_event_1/cgc/0b32aa01_01")
es = no_nx_i386.factory.entry_state()
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(no_nx_i386.entry)), 5)
# load stack to initialize page
es.memory.load(es.regs.sp, 4)
# stack should be PROT_READ|PROT_WRITE|PROT_EXEC
nose.tools.assert_equal(es.se.any_int(es.memory.permissions(es.regs.sp)), 7)
if __name__ == "__main__":
test_nx()
test_no_nx()
|
|
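The magic numbers in the assertions above are POSIX mmap protection bitmasks (PROT_READ=1, PROT_WRITE=2, PROT_EXEC=4); a tiny decoder makes a failing assertion readable:

PROT_READ, PROT_WRITE, PROT_EXEC = 1, 2, 4

def decode_prot(bits):
    names = [(PROT_READ, "READ"), (PROT_WRITE, "WRITE"),
             (PROT_EXEC, "EXEC")]
    return "|".join(n for flag, n in names if bits & flag) or "NONE"

print(decode_prot(5))  # READ|EXEC  -- the .text mapping
print(decode_prot(3))  # READ|WRITE -- a stack on an NX binary
print(decode_prot(7))  # READ|WRITE|EXEC -- the CGC binary's stack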
f4a28ea7d513dc3a52758966fcc2682e23b05b4c
|
lp-build-juju-snap.py
|
lp-build-juju-snap.py
|
#! /usr/bin/python
import os
import time
from datetime import datetime
from launchpadlib.launchpad import Launchpad
# basic data
arches = ['amd64', 'arm64', 'ppc64el']
series = 'xenial'
# basic paths
home = os.getenv("HOME")
workdir = os.path.join(home, "juju-daily-snap")
# we need to store credentials once for cronned builds
cachedir = os.path.join(workdir, "cache")
creds = os.path.join(workdir, "credentials")
# log in
launchpad = Launchpad.login_with('Juju Snap Builds',
'production', cachedir,
credentials_file=creds,
version='devel')
# get team data and ppa
jujuqa = launchpad.people['jujuisquality']
# get snap
juju_snap = launchpad.snaps.getByName(name='juju-edge',
owner=jujuqa)
# get distro info
ubuntu = launchpad.distributions['ubuntu']
release = ubuntu.getSeries(name_or_version=series)
# print a stamp
stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("Trying to trigger builds at: {}".format(stamp))
# loop over arches and trigger builds
mybuilds = []
for buildarch in arches:
arch = release.getDistroArchSeries(archtag=buildarch)
request = juju_snap.requestBuild(archive=release.main_archive,
distro_arch_series=arch,
pocket='Updates')
buildid = str(request).rsplit('/', 1)[-1]
mybuilds.append(buildid)
print("Arch: {} is building under: {}".format(buildarch,
request))
# check the status each minute til all builds have finished
failures = []
while len(mybuilds):
for build in mybuilds:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[build])
status = response[build]['status']
if status == "FULLYBUILT":
mybuilds.remove(build)
continue
elif status == "FAILEDTOBUILD":
failures.append(build)
mybuilds.remove(build)
continue
elif status == "CANCELLED":
mybuilds.remove(build)
continue
time.sleep(60)
# if we had failures, raise them
if len(failures):
for failure in failures:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[failure])
buildlog = response[build]['build_log_url']
if buildlog != 'None':
print(buildlog)
arch = str(buildlog).split('_')[4]
raise("juju snap {} build at {} failed for id: {} log: {}".format(
arch, stamp, failure, buildlog))
print("Builds complete")
|
Add existing lp build juju snap script
|
Add existing lp build juju snap script
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
Add existing lp build juju snap script
|
#! /usr/bin/python
import os
import time
from datetime import datetime
from launchpadlib.launchpad import Launchpad
# basic data
arches = ['amd64', 'arm64', 'ppc64el']
series = 'xenial'
# basic paths
home = os.getenv("HOME")
workdir = os.path.join(home, "juju-daily-snap")
# we need to store credentials once for cronned builds
cachedir = os.path.join(workdir, "cache")
creds = os.path.join(workdir, "credentials")
# log in
launchpad = Launchpad.login_with('Juju Snap Builds',
'production', cachedir,
credentials_file=creds,
version='devel')
# get team data and ppa
jujuqa = launchpad.people['jujuisquality']
# get snap
juju_snap = launchpad.snaps.getByName(name='juju-edge',
owner=jujuqa)
# get distro info
ubuntu = launchpad.distributions['ubuntu']
release = ubuntu.getSeries(name_or_version=series)
# print a stamp
stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("Trying to trigger builds at: {}".format(stamp))
# loop over arches and trigger builds
mybuilds = []
for buildarch in arches:
arch = release.getDistroArchSeries(archtag=buildarch)
request = juju_snap.requestBuild(archive=release.main_archive,
distro_arch_series=arch,
pocket='Updates')
buildid = str(request).rsplit('/', 1)[-1]
mybuilds.append(buildid)
print("Arch: {} is building under: {}".format(buildarch,
request))
# check the status each minute til all builds have finished
failures = []
while len(mybuilds):
for build in mybuilds:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[build])
status = response[build]['status']
if status == "FULLYBUILT":
mybuilds.remove(build)
continue
elif status == "FAILEDTOBUILD":
failures.append(build)
mybuilds.remove(build)
continue
elif status == "CANCELLED":
mybuilds.remove(build)
continue
time.sleep(60)
# if we had failures, raise them
if len(failures):
for failure in failures:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[failure])
buildlog = response[build]['build_log_url']
if buildlog != 'None':
print(buildlog)
arch = str(buildlog).split('_')[4]
raise("juju snap {} build at {} failed for id: {} log: {}".format(
arch, stamp, failure, buildlog))
print("Builds complete")
|
<commit_before><commit_msg>Add existing lp build juju snap script<commit_after>
|
#! /usr/bin/python
import os
import time
from datetime import datetime
from launchpadlib.launchpad import Launchpad
# basic data
arches = ['amd64', 'arm64', 'ppc64el']
series = 'xenial'
# basic paths
home = os.getenv("HOME")
workdir = os.path.join(home, "juju-daily-snap")
# we need to store credentials once for cronned builds
cachedir = os.path.join(workdir, "cache")
creds = os.path.join(workdir, "credentials")
# log in
launchpad = Launchpad.login_with('Juju Snap Builds',
'production', cachedir,
credentials_file=creds,
version='devel')
# get team data and ppa
jujuqa = launchpad.people['jujuisquality']
# get snap
juju_snap = launchpad.snaps.getByName(name='juju-edge',
owner=jujuqa)
# get distro info
ubuntu = launchpad.distributions['ubuntu']
release = ubuntu.getSeries(name_or_version=series)
# print a stamp
stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("Trying to trigger builds at: {}".format(stamp))
# loop over arches and trigger builds
mybuilds = []
for buildarch in arches:
arch = release.getDistroArchSeries(archtag=buildarch)
request = juju_snap.requestBuild(archive=release.main_archive,
distro_arch_series=arch,
pocket='Updates')
buildid = str(request).rsplit('/', 1)[-1]
mybuilds.append(buildid)
print("Arch: {} is building under: {}".format(buildarch,
request))
# check the status each minute til all builds have finished
failures = []
while len(mybuilds):
for build in mybuilds:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[build])
status = response[build]['status']
if status == "FULLYBUILT":
mybuilds.remove(build)
continue
elif status == "FAILEDTOBUILD":
failures.append(build)
mybuilds.remove(build)
continue
elif status == "CANCELLED":
mybuilds.remove(build)
continue
time.sleep(60)
# if we had failures, raise them
if len(failures):
for failure in failures:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[failure])
buildlog = response[build]['build_log_url']
if buildlog != 'None':
print(buildlog)
arch = str(buildlog).split('_')[4]
raise("juju snap {} build at {} failed for id: {} log: {}".format(
arch, stamp, failure, buildlog))
print("Builds complete")
|
Add existing lp build juju snap script#! /usr/bin/python
import os
import time
from datetime import datetime
from launchpadlib.launchpad import Launchpad
# basic data
arches = ['amd64', 'arm64', 'ppc64el']
series = 'xenial'
# basic paths
home = os.getenv("HOME")
workdir = os.path.join(home, "juju-daily-snap")
# we need to store credentials once for cronned builds
cachedir = os.path.join(workdir, "cache")
creds = os.path.join(workdir, "credentials")
# log in
launchpad = Launchpad.login_with('Juju Snap Builds',
'production', cachedir,
credentials_file=creds,
version='devel')
# get team data and ppa
jujuqa = launchpad.people['jujuisquality']
# get snap
juju_snap = launchpad.snaps.getByName(name='juju-edge',
owner=jujuqa)
# get distro info
ubuntu = launchpad.distributions['ubuntu']
release = ubuntu.getSeries(name_or_version=series)
# print a stamp
stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("Trying to trigger builds at: {}".format(stamp))
# loop over arches and trigger builds
mybuilds = []
for buildarch in arches:
arch = release.getDistroArchSeries(archtag=buildarch)
request = juju_snap.requestBuild(archive=release.main_archive,
distro_arch_series=arch,
pocket='Updates')
buildid = str(request).rsplit('/', 1)[-1]
mybuilds.append(buildid)
print("Arch: {} is building under: {}".format(buildarch,
request))
# check the status each minute til all builds have finished
failures = []
while len(mybuilds):
for build in mybuilds:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[build])
status = response[build]['status']
if status == "FULLYBUILT":
mybuilds.remove(build)
continue
elif status == "FAILEDTOBUILD":
failures.append(build)
mybuilds.remove(build)
continue
elif status == "CANCELLED":
mybuilds.remove(build)
continue
time.sleep(60)
# if we had failures, raise them
if len(failures):
for failure in failures:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[failure])
buildlog = response[build]['build_log_url']
if buildlog != 'None':
print(buildlog)
arch = str(buildlog).split('_')[4]
raise("juju snap {} build at {} failed for id: {} log: {}".format(
arch, stamp, failure, buildlog))
print("Builds complete")
|
<commit_before><commit_msg>Add existing lp build juju snap script<commit_after>#! /usr/bin/python
import os
import time
from datetime import datetime
from launchpadlib.launchpad import Launchpad
# basic data
arches = ['amd64', 'arm64', 'ppc64el']
series = 'xenial'
# basic paths
home = os.getenv("HOME")
workdir = os.path.join(home, "juju-daily-snap")
# we need to store credentials once for cronned builds
cachedir = os.path.join(workdir, "cache")
creds = os.path.join(workdir, "credentials")
# log in
launchpad = Launchpad.login_with('Juju Snap Builds',
'production', cachedir,
credentials_file=creds,
version='devel')
# get team data and ppa
jujuqa = launchpad.people['jujuisquality']
# get snap
juju_snap = launchpad.snaps.getByName(name='juju-edge',
owner=jujuqa)
# get distro info
ubuntu = launchpad.distributions['ubuntu']
release = ubuntu.getSeries(name_or_version=series)
# print a stamp
stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("Trying to trigger builds at: {}".format(stamp))
# loop over arches and trigger builds
mybuilds = []
for buildarch in arches:
arch = release.getDistroArchSeries(archtag=buildarch)
request = juju_snap.requestBuild(archive=release.main_archive,
distro_arch_series=arch,
pocket='Updates')
buildid = str(request).rsplit('/', 1)[-1]
mybuilds.append(buildid)
print("Arch: {} is building under: {}".format(buildarch,
request))
# check the status each minute til all builds have finished
failures = []
while len(mybuilds):
for build in mybuilds:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[build])
status = response[build]['status']
if status == "FULLYBUILT":
mybuilds.remove(build)
continue
elif status == "FAILEDTOBUILD":
failures.append(build)
mybuilds.remove(build)
continue
elif status == "CANCELLED":
mybuilds.remove(build)
continue
time.sleep(60)
# if we had failures, raise them
if len(failures):
for failure in failures:
response = juju_snap.getBuildSummariesForSnapBuildIds(
snap_build_ids=[failure])
buildlog = response[build]['build_log_url']
if buildlog != 'None':
print(buildlog)
arch = str(buildlog).split('_')[4]
raise("juju snap {} build at {} failed for id: {} log: {}".format(
arch, stamp, failure, buildlog))
print("Builds complete")
|
|
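Two bugs sit in the failure loop above: summaries are fetched for failure but indexed with build, the stale name left over from the polling loop, and raise is applied to a plain string, which is a TypeError on any modern Python. A corrected sketch of that tail as a function:

def report_failures(juju_snap, failures, stamp):
    for failure in failures:
        summaries = juju_snap.getBuildSummariesForSnapBuildIds(
            snap_build_ids=[failure])
        buildlog = summaries[failure]['build_log_url']
        if buildlog != 'None':
            print(buildlog)
        # The arch-from-URL split is kept from the script; guard the
        # index so a missing log cannot turn into an IndexError.
        parts = str(buildlog).split('_')
        arch = parts[4] if len(parts) > 4 else 'unknown'
        raise RuntimeError(
            "juju snap {} build at {} failed for id: {} log: {}".format(
                arch, stamp, failure, buildlog))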
97e6f41ba5eef2a5c2f5dc0d25b7556ccc95351f
|
nodeconductor/logging/migrations/0002_alter_alert_severity_field.py
|
nodeconductor/logging/migrations/0002_alter_alert_severity_field.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('logging', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='alert',
name='severity',
field=models.SmallIntegerField(choices=[(10, b'Debug'), (20, b'Info'), (30, b'Warning'), (40, b'Error'), (50, b'Critical')]),
preserve_default=True,
),
]
|
Add migration for alert severity field (nc-553)
|
Add migration for alert severity field (nc-553)
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
Add migration for alert severity field (nc-553)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('logging', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='alert',
name='severity',
field=models.SmallIntegerField(choices=[(10, b'Debug'), (20, b'Info'), (30, b'Warning'), (40, b'Error'), (50, b'Critical')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration for alert severity field (nc-553)<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('logging', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='alert',
name='severity',
field=models.SmallIntegerField(choices=[(10, b'Debug'), (20, b'Info'), (30, b'Warning'), (40, b'Error'), (50, b'Critical')]),
preserve_default=True,
),
]
|
Add migration for alert severity field (nc-553)# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('logging', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='alert',
name='severity',
field=models.SmallIntegerField(choices=[(10, b'Debug'), (20, b'Info'), (30, b'Warning'), (40, b'Error'), (50, b'Critical')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration for alert severity field (nc-553)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('logging', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='alert',
name='severity',
field=models.SmallIntegerField(choices=[(10, b'Debug'), (20, b'Info'), (30, b'Warning'), (40, b'Error'), (50, b'Critical')]),
preserve_default=True,
),
]
|
|
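The severity choices in this migration are exactly the stdlib logging levels, which gives the magic numbers names for free:

import logging

SEVERITY_CHOICES = [
    (logging.DEBUG, 'Debug'), (logging.INFO, 'Info'),
    (logging.WARNING, 'Warning'), (logging.ERROR, 'Error'),
    (logging.CRITICAL, 'Critical'),
]
assert [level for level, _ in SEVERITY_CHOICES] == [10, 20, 30, 40, 50]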
2337f61b9da2368dd7847e8f8f727e96cd1150b3
|
cw_controller.py
|
cw_controller.py
|
import restapi
# Class for /company/companies
import connectpyse
class CWController(restapi.Client):
def __init__(self):
self.module_url = ''
super().__init__('{}/{}'.format(connectpyse.API_URL, self.module_url))
def get_companies(self, user_params={}):
json_results = self.companies.get(user_headers=connectpyse.basic_auth, user_params=user_params)
for json in json_results:
yield company.Company(json)
def create_company(self):
pass
def get_companies_count(self):
json_results = self.companies.get(the_id='count', user_headers=connectpyse.basic_auth)
count = json_results['count']
return count
def get_company_by_id(self, company_id):
a_company = company.Company(self.companies.get(the_id=company_id, user_headers=connectpyse.basic_auth))
return a_company
def delete_company_by_id(self, company_id ):
pass
def replace_company(self, company_id):
pass
def update_company(self, company_id):
pass
def merge_company(self, company_id):
pass
|
Add controller super class draft via upload
|
Add controller super class draft via upload
|
Python
|
mit
|
joshuamsmith/ConnectPyse
|
Add controller super class draft via upload
|
import restapi
# Class for /company/companies
import connectpyse
class CWController(restapi.Client):
def __init__(self):
self.module_url = ''
super().__init__('{}/{}'.format(connectpyse.API_URL, self.module_url))
def get_companies(self, user_params={}):
json_results = self.companies.get(user_headers=connectpyse.basic_auth, user_params=user_params)
for json in json_results:
yield company.Company(json)
def create_company(self):
pass
def get_companies_count(self):
json_results = self.companies.get(the_id='count', user_headers=connectpyse.basic_auth)
count = json_results['count']
return count
def get_company_by_id(self, company_id):
a_company = company.Company(self.companies.get(the_id=company_id, user_headers=connectpyse.basic_auth))
return a_company
def delete_company_by_id(self, company_id ):
pass
def replace_company(self, company_id):
pass
def update_company(self, company_id):
pass
def merge_company(self, company_id):
pass
|
<commit_before><commit_msg>Add controller super class draft via upload<commit_after>
|
import restapi
# Class for /company/companies
import connectpyse
class CWController(restapi.Client):
def __init__(self):
self.module_url = ''
super().__init__('{}/{}'.format(connectpyse.API_URL, self.module_url))
def get_companies(self, user_params={}):
json_results = self.companies.get(user_headers=connectpyse.basic_auth, user_params=user_params)
for json in json_results:
yield company.Company(json)
def create_company(self):
pass
def get_companies_count(self):
json_results = self.companies.get(the_id='count', user_headers=connectpyse.basic_auth)
count = json_results['count']
return count
def get_company_by_id(self, company_id):
a_company = company.Company(self.companies.get(the_id=company_id, user_headers=connectpyse.basic_auth))
return a_company
def delete_company_by_id(self, company_id ):
pass
def replace_company(self, company_id):
pass
def update_company(self, company_id):
pass
def merge_company(self, company_id):
pass
|
Add controller super class draft via uploadimport restapi
# Class for /company/companies
import connectpyse
class CWController(restapi.Client):
def __init__(self):
self.module_url = ''
super().__init__('{}/{}'.format(connectpyse.API_URL, self.module_url))
def get_companies(self, user_params={}):
json_results = self.companies.get(user_headers=connectpyse.basic_auth, user_params=user_params)
for json in json_results:
yield company.Company(json)
def create_company(self):
pass
def get_companies_count(self):
json_results = self.companies.get(the_id='count', user_headers=connectpyse.basic_auth)
count = json_results['count']
return count
def get_company_by_id(self, company_id):
a_company = company.Company(self.companies.get(the_id=company_id, user_headers=connectpyse.basic_auth))
return a_company
def delete_company_by_id(self, company_id ):
pass
def replace_company(self, company_id):
pass
def update_company(self, company_id):
pass
def merge_company(self, company_id):
pass
|
<commit_before><commit_msg>Add controller super class draft via upload<commit_after>import restapi
# Class for /company/companies
import connectpyse
class CWController(restapi.Client):
def __init__(self):
self.module_url = ''
super().__init__('{}/{}'.format(connectpyse.API_URL, self.module_url))
def get_companies(self, user_params={}):
json_results = self.companies.get(user_headers=connectpyse.basic_auth, user_params=user_params)
for json in json_results:
yield company.Company(json)
def create_company(self):
pass
def get_companies_count(self):
json_results = self.companies.get(the_id='count', user_headers=connectpyse.basic_auth)
count = json_results['count']
return count
def get_company_by_id(self, company_id):
a_company = company.Company(self.companies.get(the_id=company_id, user_headers=connectpyse.basic_auth))
return a_company
def delete_company_by_id(self, company_id ):
pass
def replace_company(self, company_id):
pass
def update_company(self, company_id):
pass
def merge_company(self, company_id):
pass
|
|
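Two things the draft above still needs before it runs: company is referenced in two methods but never imported, and user_params={} is a shared mutable default. A sketch of get_companies with both addressed -- the import path is an assumption, and connectpyse is the module the class already imports:

import company  # hypothetical path; wherever the Company model lives

def get_companies(self, user_params=None):
    # A None default sidesteps the shared-dict pitfall of user_params={}
    params = {} if user_params is None else user_params
    json_results = self.companies.get(
        user_headers=connectpyse.basic_auth, user_params=params)
    for json_obj in json_results:
        yield company.Company(json_obj)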
4a41397aefcee66d5d4d06b44f94d1337a154d98
|
ynr/apps/popolo/migrations/0035_attach_memberships_to_posts.py
|
ynr/apps/popolo/migrations/0035_attach_memberships_to_posts.py
|
# Generated by Django 2.2.9 on 2020-01-27 21:19
from django.db import migrations
def attach_memberships_to_posts(apps, schema_editor):
Membership = apps.get_model("popolo", "Membership")
for membership in Membership.objects.all():
if membership.post != membership.ballot.post:
membership.post = membership.ballot.post
membership.save()
class Migration(migrations.Migration):
dependencies = [("popolo", "0034_populate_from_ee_divisions")]
operations = [
migrations.RunPython(
attach_memberships_to_posts, migrations.RunPython.noop
)
]
|
Set all membership.post objects to membership.ballot.post
|
Set all membership.post objects to membership.ballot.post
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
Set all membership.post objects to membership.ballot.post
|
# Generated by Django 2.2.9 on 2020-01-27 21:19
from django.db import migrations
def attach_memberships_to_posts(apps, schema_editor):
Membership = apps.get_model("popolo", "Membership")
for membership in Membership.objects.all():
if membership.post != membership.ballot.post:
membership.post = membership.ballot.post
membership.save()
class Migration(migrations.Migration):
dependencies = [("popolo", "0034_populate_from_ee_divisions")]
operations = [
migrations.RunPython(
attach_memberships_to_posts, migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Set all membership.post objects to membership.ballot.post<commit_after>
|
# Generated by Django 2.2.9 on 2020-01-27 21:19
from django.db import migrations
def attach_memberships_to_posts(apps, schema_editor):
Membership = apps.get_model("popolo", "Membership")
for membership in Membership.objects.all():
if membership.post != membership.ballot.post:
membership.post = membership.ballot.post
membership.save()
class Migration(migrations.Migration):
dependencies = [("popolo", "0034_populate_from_ee_divisions")]
operations = [
migrations.RunPython(
attach_memberships_to_posts, migrations.RunPython.noop
)
]
|
Set all membership.post objects to membership.ballot.post# Generated by Django 2.2.9 on 2020-01-27 21:19
from django.db import migrations
def attach_memberships_to_posts(apps, schema_editor):
Membership = apps.get_model("popolo", "Membership")
for membership in Membership.objects.all():
if membership.post != membership.ballot.post:
membership.post = membership.ballot.post
membership.save()
class Migration(migrations.Migration):
dependencies = [("popolo", "0034_populate_from_ee_divisions")]
operations = [
migrations.RunPython(
attach_memberships_to_posts, migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Set all membership.post objects to membership.ballot.post<commit_after># Generated by Django 2.2.9 on 2020-01-27 21:19
from django.db import migrations
def attach_memberships_to_posts(apps, schema_editor):
Membership = apps.get_model("popolo", "Membership")
for membership in Membership.objects.all():
if membership.post != membership.ballot.post:
membership.post = membership.ballot.post
membership.save()
class Migration(migrations.Migration):
dependencies = [("popolo", "0034_populate_from_ee_divisions")]
operations = [
migrations.RunPython(
attach_memberships_to_posts, migrations.RunPython.noop
)
]
|
|
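The backfill above loads and tests every membership in Python; the same comparison can run server-side with F(), so only the inconsistent rows come back. A sketch, assuming post is non-nullable (a nullable post needs an extra Q(post__isnull=True) branch, since exclude() drops NULL comparisons):

from django.db.models import F

def attach_memberships_to_posts(apps, schema_editor):
    Membership = apps.get_model("popolo", "Membership")
    stale = (Membership.objects
             .exclude(post=F("ballot__post"))
             .select_related("ballot"))
    for membership in stale.iterator():
        membership.post = membership.ballot.post
        membership.save(update_fields=["post"])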
2a4b9c1b7628ab7088c01f6590b31d1c8c9a5958
|
Classification/trump_plot.py
|
Classification/trump_plot.py
|
import sys, os
foo_dir = os.path.dirname(os.path.join(os.getcwd(), __file__))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../DataGathering', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../Classification', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../TextCleaning', '..')))
sys.path.insert(0, '../DataGathering/')
from svm_classifier import train_and_predict
import matplotlib.pyplot as plt
import datetime
import csv
def classify():
tweet_texts = []
tweets = []
i = 0
with open('../../480k_trump_merged.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
tweet_texts.append(row[1])
tweets.append(row)
i += 1
if i > 300000000:
break
del tweet_texts[0]
del tweets[0]
results = train_and_predict(tweet_texts)
with open('../../480k_trump_classified.csv', 'w+', encoding='utf-8') as csvfile:
csv_writer = csv.writer(csvfile, delimiter=',')
for i in range(0, len(results)):
csv_writer.writerow(tweets[i]+[results[i]])
def plot():
values = []
timestamps = []
data = []
with open('../../480k_trump_classified.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
raw_date = row[6]
value = row[7]
timestamp = datetime.datetime.strptime(raw_date, "%a %b %d %H:%M:%S %z %Y").timestamp()
values.append(value)
timestamps.append(timestamp)
data.append([timestamp, value])
data = sorted(data, key=lambda x: x[0])
maxTimestamp = data[-1][0]
minTimestamp = data[0][0]
bin_size = 1000*3600
hours = int((maxTimestamp-minTimestamp)/bin_size)
hourLists = [[] for _ in range(hours)]
print(len(hourLists))
for i in range(0,len(data)):
h = int((data[i][0]-minTimestamp)/bin_size)-1
print(h)
hourLists[h].append(int(data[i][1]))
values = []
for hourList in hourLists:
if len(hourList) >= 4:
values.append(sum(hourList)/max(len(hourList), 1))
plt.plot(range(0, len(values)), values, 'ro')
plt.show()
#classify()
plot()
|
Add script to plot insights from Trump’s tweets
|
Add script to plot insights from Trump’s tweets
|
Python
|
mit
|
steinnp/Big-Data-Final
|
Add script to plot insights from Trump’s tweets
|
import sys, os
foo_dir = os.path.dirname(os.path.join(os.getcwd(), __file__))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../DataGathering', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../Classification', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../TextCleaning', '..')))
sys.path.insert(0, '../DataGathering/')
from svm_classifier import train_and_predict
import matplotlib.pyplot as plt
import datetime
import csv
def classify():
tweet_texts = []
tweets = []
i = 0
with open('../../480k_trump_merged.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
tweet_texts.append(row[1])
tweets.append(row)
i += 1
if i > 300000000:
break
del tweet_texts[0]
del tweets[0]
results = train_and_predict(tweet_texts)
with open('../../480k_trump_classified.csv', 'w+', encoding='utf-8') as csvfile:
csv_writer = csv.writer(csvfile, delimiter=',')
for i in range(0, len(results)):
csv_writer.writerow(tweets[i]+[results[i]])
def plot():
values = []
timestamps = []
data = []
with open('../../480k_trump_classified.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
raw_date = row[6]
value = row[7]
timestamp = datetime.datetime.strptime(raw_date, "%a %b %d %H:%M:%S %z %Y").timestamp()
values.append(value)
timestamps.append(timestamp)
data.append([timestamp, value])
data = sorted(data, key=lambda x: x[0])
maxTimestamp = data[-1][0]
minTimestamp = data[0][0]
bin_size = 1000*3600
hours = int((maxTimestamp-minTimestamp)/bin_size)
hourLists = [[] for _ in range(hours)]
print(len(hourLists))
for i in range(0,len(data)):
h = int((data[i][0]-minTimestamp)/bin_size)-1
print(h)
hourLists[h].append(int(data[i][1]))
values = []
for hourList in hourLists:
if len(hourList) >= 4:
values.append(sum(hourList)/max(len(hourList), 1))
plt.plot(range(0, len(values)), values, 'ro')
plt.show()
#classify()
plot()
|
<commit_before><commit_msg>Add script to plot insights from Trump’s tweets<commit_after>
|
import sys, os
foo_dir = os.path.dirname(os.path.join(os.getcwd(), __file__))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../DataGathering', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../Classification', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../TextCleaning', '..')))
sys.path.insert(0, '../DataGathering/')
from svm_classifier import train_and_predict
import matplotlib.pyplot as plt
import datetime
import csv
def classify():
tweet_texts = []
tweets = []
i = 0
with open('../../480k_trump_merged.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
tweet_texts.append(row[1])
tweets.append(row)
i += 1
if i > 300000000:
break
del tweet_texts[0]
del tweets[0]
results = train_and_predict(tweet_texts)
with open('../../480k_trump_classified.csv', 'w+', encoding='utf-8') as csvfile:
csv_writer = csv.writer(csvfile, delimiter=',')
for i in range(0, len(results)):
csv_writer.writerow(tweets[i]+[results[i]])
def plot():
values = []
timestamps = []
data = []
with open('../../480k_trump_classified.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
raw_date = row[6]
value = row[7]
timestamp = datetime.datetime.strptime(raw_date, "%a %b %d %H:%M:%S %z %Y").timestamp()
values.append(value)
timestamps.append(timestamp)
data.append([timestamp, value])
data = sorted(data, key=lambda x: x[0])
maxTimestamp = data[-1][0]
minTimestamp = data[0][0]
bin_size = 1000*3600
hours = int((maxTimestamp-minTimestamp)/bin_size)
hourLists = [[] for _ in range(hours)]
print(len(hourLists))
for i in range(0,len(data)):
h = int((data[i][0]-minTimestamp)/bin_size)-1
print(h)
hourLists[h].append(int(data[i][1]))
values = []
for hourList in hourLists:
if len(hourList) >= 4:
values.append(sum(hourList)/max(len(hourList), 1))
plt.plot(range(0, len(values)), values, 'ro')
plt.show()
#classify()
plot()
|
Add script to plot insights from Trump’s tweets
import sys, os
foo_dir = os.path.dirname(os.path.join(os.getcwd(), __file__))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../DataGathering', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../Classification', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../TextCleaning', '..')))
sys.path.insert(0, '../DataGathering/')
from svm_classifier import train_and_predict
import matplotlib.pyplot as plt
import datetime
import csv
def classify():
tweet_texts = []
tweets = []
i = 0
with open('../../480k_trump_merged.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
tweet_texts.append(row[1])
tweets.append(row)
i += 1
if i > 300000000:
break
del tweet_texts[0]
del tweets[0]
results = train_and_predict(tweet_texts)
with open('../../480k_trump_classified.csv', 'w+', encoding='utf-8') as csvfile:
csv_writer = csv.writer(csvfile, delimiter=',')
for i in range(0, len(results)):
csv_writer.writerow(tweets[i]+[results[i]])
def plot():
values = []
timestamps = []
data = []
with open('../../480k_trump_classified.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
raw_date = row[6]
value = row[7]
timestamp = datetime.datetime.strptime(raw_date, "%a %b %d %H:%M:%S %z %Y").timestamp()
values.append(value)
timestamps.append(timestamp)
data.append([timestamp, value])
data = sorted(data, key=lambda x: x[0])
maxTimestamp = data[-1][0]
minTimestamp = data[0][0]
bin_size = 1000*3600
hours = int((maxTimestamp-minTimestamp)/bin_size)
hourLists = [[] for _ in range(hours)]
print(len(hourLists))
for i in range(0,len(data)):
h = int((data[i][0]-minTimestamp)/bin_size)-1
print(h)
hourLists[h].append(int(data[i][1]))
values = []
for hourList in hourLists:
if len(hourList) >= 4:
values.append(sum(hourList)/max(len(hourList), 1))
plt.plot(range(0, len(values)), values, 'ro')
plt.show()
#classify()
plot()
|
<commit_before><commit_msg>Add script to plot insights from Trump’s tweets<commit_after>
import sys, os
foo_dir = os.path.dirname(os.path.join(os.getcwd(), __file__))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../DataGathering', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../Classification', '..')))
sys.path.append(os.path.normpath(os.path.join(foo_dir, '../TextCleaning', '..')))
sys.path.insert(0, '../DataGathering/')
from svm_classifier import train_and_predict
import matplotlib.pyplot as plt
import datetime
import csv
def classify():
tweet_texts = []
tweets = []
i = 0
with open('../../480k_trump_merged.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
tweet_texts.append(row[1])
tweets.append(row)
i += 1
if i > 300000000:
break
del tweet_texts[0]
del tweets[0]
results = train_and_predict(tweet_texts)
with open('../../480k_trump_classified.csv', 'w+', encoding='utf-8') as csvfile:
csv_writer = csv.writer(csvfile, delimiter=',')
for i in range(0, len(results)):
csv_writer.writerow(tweets[i]+[results[i]])
def plot():
values = []
timestamps = []
data = []
with open('../../480k_trump_classified.csv', 'r', encoding='utf-8') as csvfile:
csv_reader = csv.reader(csvfile, delimiter=',')
for row in csv_reader:
raw_date = row[6]
value = row[7]
timestamp = datetime.datetime.strptime(raw_date, "%a %b %d %H:%M:%S %z %Y").timestamp()
values.append(value)
timestamps.append(timestamp)
data.append([timestamp, value])
data = sorted(data, key=lambda x: x[0])
maxTimestamp = data[-1][0]
minTimestamp = data[0][0]
    bin_size = 3600  # datetime.timestamp() returns seconds, so one hour per bin
    hours = int((maxTimestamp - minTimestamp) / bin_size) + 1
    hourLists = [[] for _ in range(hours)]
    print("Hour bins:", len(hourLists))
    for i in range(0, len(data)):
        h = int((data[i][0] - minTimestamp) / bin_size)
        hourLists[h].append(int(data[i][1]))
values = []
for hourList in hourLists:
if len(hourList) >= 4:
values.append(sum(hourList)/max(len(hourList), 1))
plt.plot(range(0, len(values)), values, 'ro')
plt.show()
#classify()
plot()
|
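The plot() routine above builds the hour-wide buckets by hand. As an aside, a sketch of the same hourly averaging with pandas, which the original script does not use; the column names and the four-tweets-per-bin threshold are assumptions carried over from the code above:

import pandas as pd

def hourly_means(rows):
    # rows: iterable of (unix_timestamp_in_seconds, label) pairs
    df = pd.DataFrame(rows, columns=['ts', 'label'])
    df['ts'] = pd.to_datetime(df['ts'], unit='s')
    series = df.set_index('ts')['label'].astype(int)
    hourly = series.resample('1H').mean()    # average label per one-hour bin
    counts = series.resample('1H').count()   # tweets per bin
    return hourly[counts >= 4]               # keep only well-populated bins

# e.g. hourly_means([(1484928000, 1), (1484929000, 0)]).plot(style='ro')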
|
2f83109cf85d74ecd5da5a849cc165e723f8e5fd
|
CreateObject.py
|
CreateObject.py
|
for key, value in request.data.iteritems():
if value is None:
print key
if getattr(ExistingBooking, key) is not None:
value = getattr(ExistingBooking, key)
print 'VALUE::',value
setattr(Bookingupdate, key, value)
Bookingupdate.save()
|
Create Python objects using getattr and setattr
|
Create Python objects using getattr and setattr
This can be used for django models with copious fields
|
Python
|
mit
|
gladsonvm/snippets
|
Create Python objects using getattr and setattr
This can be used for django models with copious fields
|
for key, value in request.data.iteritems():
if value is None:
print key
if getattr(ExistingBooking, key) is not None:
value = getattr(ExistingBooking, key)
print 'VALUE::',value
setattr(Bookingupdate, key, value)
Bookingupdate.save()
|
<commit_before><commit_msg>Create Python objects using getattr and setattr
This can be used for django models with copious fields<commit_after>
|
for key, value in request.data.iteritems():
if value is None:
print key
if getattr(ExistingBooking, key) is not None:
value = getattr(ExistingBooking, key)
print 'VALUE::',value
setattr(Bookingupdate, key, value)
Bookingupdate.save()
|
Create Python objects using getattr and setattr
This can be used for django models with copious fields
for key, value in request.data.iteritems():
if value is None:
print key
if getattr(ExistingBooking, key) is not None:
value = getattr(ExistingBooking, key)
print 'VALUE::',value
setattr(Bookingupdate, key, value)
Bookingupdate.save()
|
<commit_before><commit_msg>Create Python objects using getattr and setattr
This can be used for django models with copious fields<commit_after>for key, value in request.data.iteritems():
if value is None:
print key
if getattr(ExistingBooking, key) is not None:
value = getattr(ExistingBooking, key)
print 'VALUE::',value
setattr(Bookingupdate, key, value)
Bookingupdate.save()
|
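The snippet above hard-codes the three objects involved. A hedged generalization of the same getattr/setattr pattern (the helper name and the call site are hypothetical, not part of the original snippet):

def fill_missing_fields(update_obj, existing_obj, data):
    # Copy any field the incoming data left as None from the existing
    # instance, then apply every value to the object being updated.
    for key, value in data.items():
        if value is None and getattr(existing_obj, key, None) is not None:
            value = getattr(existing_obj, key)
        setattr(update_obj, key, value)
    return update_obj

# Hypothetical usage with the names from the snippet:
# fill_missing_fields(Bookingupdate, ExistingBooking, request.data).save()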
|
4630a1410f4a31865a7293eae67c7f10e06fba67
|
annocfg.py
|
annocfg.py
|
class AnnotatedCFG(object):
def __init__(self):
pass
def should_take_exit(self, addr_from, addr_to):
return False
def should_execute_statement(self, addr):
return False
def get_run(self, addr):
return None
def get_whitelisted_statement(self, addr):
return []
|
Add the skeleton of AnnotatedCFG class.
|
Add the skeleton of AnnotatedCFG class.
|
Python
|
bsd-2-clause
|
zhuyue1314/angr,cureHsu/angr,tyb0807/angr,iamahuman/angr,iamahuman/angr,mingderwang/angr,haylesr/angr,GuardianRG/angr,lowks/angr,axt/angr,chubbymaggie/angr,avain/angr,fjferrer/angr,avain/angr,angr/angr,terry2012/angr,angr/angr,f-prettyland/angr,iamahuman/angr,f-prettyland/angr,lowks/angr,xurantju/angr,cureHsu/angr,chubbymaggie/angr,schieb/angr,tyb0807/angr,angr/angr,axt/angr,fjferrer/angr,chubbymaggie/angr,zhuyue1314/angr,haylesr/angr,terry2012/angr,tyb0807/angr,schieb/angr,axt/angr,schieb/angr,GuardianRG/angr,f-prettyland/angr,xurantju/angr,mingderwang/angr
|
Add the skeleton of AnnotatedCFG class.
|
class AnnotatedCFG(object):
def __init__(self):
pass
def should_take_exit(self, addr_from, addr_to):
return False
def should_execute_statement(self, addr):
return False
def get_run(self, addr):
return None
def get_whitelisted_statement(self, addr):
return []
|
<commit_before><commit_msg>Add the skeleton of AnnotatedCFG class.<commit_after>
|
class AnnotatedCFG(object):
def __init__(self):
pass
def should_take_exit(self, addr_from, addr_to):
return False
def should_execute_statement(self, addr):
return False
def get_run(self, addr):
return None
def get_whitelisted_statement(self, addr):
return []
|
Add the skeleton of AnnotatedCFG class.
class AnnotatedCFG(object):
def __init__(self):
pass
def should_take_exit(self, addr_from, addr_to):
return False
def should_execute_statement(self, addr):
return False
def get_run(self, addr):
return None
def get_whitelisted_statement(self, addr):
return []
|
<commit_before><commit_msg>Add the skeleton of AnnotatedCFG class.<commit_after>
class AnnotatedCFG(object):
def __init__(self):
pass
def should_take_exit(self, addr_from, addr_to):
return False
def should_execute_statement(self, addr):
return False
def get_run(self, addr):
return None
def get_whitelisted_statement(self, addr):
return []
|
|
d6fb3be1566649718255601923b6ca137aeb855c
|
tools/experimental_create.py
|
tools/experimental_create.py
|
#!/usr/bin/python
import eventlet
from eventlet.green import socket
import libssh2
import time
import os
import random
import base64
import uuid
import M2Crypto
from novaclient.v1_1 import client
def instance_start(instance_name, image_name, flavor_name):
"""
Method to start an instance
"""
def _null_callback(p, n, out):
"""
Method to silence the default M2Crypto.RSA.gen_key output.
"""
pass
private_key = M2Crypto.RSA.gen_key(2048, 65537, _null_callback)
# this is the binary public key, in ssh "BN" (BigNumber) MPI format.
# The ssh BN MPI format consists of 4 bytes that describe the length
# of the following data, followed by the data itself in big-endian
# format. The start of the string is 0x0007, which represent the 7
# bytes following that make up 'ssh-rsa'. The key exponent and
# modulus as fetched out of M2Crypto are already in MPI format, so
# we can just use them as-is. We then have to base64 encode the
# result, add a little header information, and then we have a
# full public key.
username = os.environ['OS_USERNAME']
password = os.environ['OS_PASSWORD']
tenant = os.environ['OS_TENANT_NAME']
auth_url = os.environ['OS_AUTH_URL']
nova_client = client.Client(username, password, tenant, auth_url, service_type='compute', service_name='nova')
public_key_bn = '\x00\x00\x00\x07' + 'ssh-rsa' + private_key.e + private_key.n
public_key = 'ssh-rsa %s support@heat-api.org\n' % (base64.b64encode(public_key_bn))
private_key.save_key('/tmp/private_key', cipher=None)
random_uuid = uuid.uuid4()
key_uuid = uuid.uuid3(random_uuid, '%s %s %s' % (instance_name, image_name, flavor_name))
nova_client.keypairs.create(str(key_uuid), public_key)
image_list = nova_client.images.list()
for o in image_list:
if getattr(o, 'name', '') == image_name:
image_id = o.id #getattr(o, 'id', '')
flavor_list = nova_client.flavors.list()
for o in flavor_list:
if getattr(o, 'name', '') == flavor_name:
flavor_id = getattr(o, 'id', '')
nova_client.servers.create(name=instance_name, image=image_id,
flavor=flavor_id, key_name=str(key_uuid))
return private_key
instance_start('instance-F16-test', 'F16-x86_64', "m1.tiny")
|
Add code that shows how to create an instance
|
Add code that shows how to create an instance
This creates an instance with a keypair, and saves the private key to
/tmp/private_key.
Then:
ssh -i /tmp/private_key 10.0.0.x where x is the IP address
Signed-off-by: Steven Dake <8638f3fce5db0278cfbc239bd581dfc00c29ec9d@redhat.com>
|
Python
|
apache-2.0
|
pshchelo/heat,rickerc/heat_audit,steveb/heat,pshchelo/heat,dragorosson/heat,noironetworks/heat,sdake/heat-jeos,steveb/heat,dragorosson/heat,JioCloud/heat,jasondunsmore/heat,noironetworks/heat,cryptickp/heat,dims/heat,gonzolino/heat,cwolferh/heat-scratch,dims/heat,srznew/heat,Triv90/Heat,srznew/heat,varunarya10/heat,rh-s/heat,NeCTAR-RC/heat,redhat-openstack/heat,cryptickp/heat,rdo-management/heat,miguelgrinberg/heat,JioCloud/heat,steveb/heat-cfntools,rickerc/heat_audit,varunarya10/heat,maestro-hybrid-cloud/heat,redhat-openstack/heat,citrix-openstack-build/heat,steveb/heat-cfntools,Triv90/Heat,takeshineshiro/heat,NeCTAR-RC/heat,rh-s/heat,openstack/heat-cfntools,gonzolino/heat,jasondunsmore/heat,citrix-openstack-build/heat,pratikmallya/heat,openstack/heat,maestro-hybrid-cloud/heat,ntt-sic/heat,miguelgrinberg/heat,rdo-management/heat,Triv90/Heat,pratikmallya/heat,cwolferh/heat-scratch,openstack/heat,takeshineshiro/heat,bbandaru/heat-cfntools,ntt-sic/heat
|
Add code that shows how to create an instance
This creates an instance with a keypair, and saves the private key to
/tmp/private_key.
Then:
ssh -i /tmp/private_key 10.0.0.x where x is the IP address
Signed-off-by: Steven Dake <8638f3fce5db0278cfbc239bd581dfc00c29ec9d@redhat.com>
|
#!/usr/bin/python
import eventlet
from eventlet.green import socket
import libssh2
import time
import os
import random
import base64
import uuid
import M2Crypto
from novaclient.v1_1 import client
def instance_start(instance_name, image_name, flavor_name):
"""
Method to start an instance
"""
def _null_callback(p, n, out):
"""
Method to silence the default M2Crypto.RSA.gen_key output.
"""
pass
private_key = M2Crypto.RSA.gen_key(2048, 65537, _null_callback)
# this is the binary public key, in ssh "BN" (BigNumber) MPI format.
# The ssh BN MPI format consists of 4 bytes that describe the length
# of the following data, followed by the data itself in big-endian
# format. The start of the string is 0x0007, which represent the 7
# bytes following that make up 'ssh-rsa'. The key exponent and
# modulus as fetched out of M2Crypto are already in MPI format, so
# we can just use them as-is. We then have to base64 encode the
# result, add a little header information, and then we have a
# full public key.
username = os.environ['OS_USERNAME']
password = os.environ['OS_PASSWORD']
tenant = os.environ['OS_TENANT_NAME']
auth_url = os.environ['OS_AUTH_URL']
nova_client = client.Client(username, password, tenant, auth_url, service_type='compute', service_name='nova')
public_key_bn = '\x00\x00\x00\x07' + 'ssh-rsa' + private_key.e + private_key.n
public_key = 'ssh-rsa %s support@heat-api.org\n' % (base64.b64encode(public_key_bn))
private_key.save_key('/tmp/private_key', cipher=None)
random_uuid = uuid.uuid4()
key_uuid = uuid.uuid3(random_uuid, '%s %s %s' % (instance_name, image_name, flavor_name))
nova_client.keypairs.create(str(key_uuid), public_key)
image_list = nova_client.images.list()
for o in image_list:
if getattr(o, 'name', '') == image_name:
image_id = o.id #getattr(o, 'id', '')
flavor_list = nova_client.flavors.list()
for o in flavor_list:
if getattr(o, 'name', '') == flavor_name:
flavor_id = getattr(o, 'id', '')
nova_client.servers.create(name=instance_name, image=image_id,
flavor=flavor_id, key_name=str(key_uuid))
return private_key
instance_start('instance-F16-test', 'F16-x86_64', "m1.tiny")
|
<commit_before><commit_msg>Add code that shows how to create an instance
This creates an instance with a keypair, and saves the private key to
/tmp/private_key.
Then:
ssh -i /tmp/private_key 10.0.0.x where x is the IP address
Signed-off-by: Steven Dake <8638f3fce5db0278cfbc239bd581dfc00c29ec9d@redhat.com><commit_after>
|
#!/usr/bin/python
import eventlet
from eventlet.green import socket
import libssh2
import time
import os
import random
import base64
import uuid
import M2Crypto
from novaclient.v1_1 import client
def instance_start(instance_name, image_name, flavor_name):
"""
Method to start an instance
"""
def _null_callback(p, n, out):
"""
Method to silence the default M2Crypto.RSA.gen_key output.
"""
pass
private_key = M2Crypto.RSA.gen_key(2048, 65537, _null_callback)
# this is the binary public key, in ssh "BN" (BigNumber) MPI format.
# The ssh BN MPI format consists of 4 bytes that describe the length
# of the following data, followed by the data itself in big-endian
# format. The start of the string is 0x0007, which represent the 7
# bytes following that make up 'ssh-rsa'. The key exponent and
# modulus as fetched out of M2Crypto are already in MPI format, so
# we can just use them as-is. We then have to base64 encode the
# result, add a little header information, and then we have a
# full public key.
username = os.environ['OS_USERNAME']
password = os.environ['OS_PASSWORD']
tenant = os.environ['OS_TENANT_NAME']
auth_url = os.environ['OS_AUTH_URL']
nova_client = client.Client(username, password, tenant, auth_url, service_type='compute', service_name='nova')
public_key_bn = '\x00\x00\x00\x07' + 'ssh-rsa' + private_key.e + private_key.n
public_key = 'ssh-rsa %s support@heat-api.org\n' % (base64.b64encode(public_key_bn))
private_key.save_key('/tmp/private_key', cipher=None)
random_uuid = uuid.uuid4()
key_uuid = uuid.uuid3(random_uuid, '%s %s %s' % (instance_name, image_name, flavor_name))
nova_client.keypairs.create(str(key_uuid), public_key)
image_list = nova_client.images.list()
for o in image_list:
if getattr(o, 'name', '') == image_name:
image_id = o.id #getattr(o, 'id', '')
flavor_list = nova_client.flavors.list()
for o in flavor_list:
if getattr(o, 'name', '') == flavor_name:
flavor_id = getattr(o, 'id', '')
nova_client.servers.create(name=instance_name, image=image_id,
flavor=flavor_id, key_name=str(key_uuid))
return private_key
instance_start('instance-F16-test', 'F16-x86_64', "m1.tiny")
|
Add code that shows how to create an instance
This creates an instance with a keypair, and saves the private key to
/tmp/private_key.
Then:
ssh -i /tmp/private_key 10.0.0.x where x is the IP address
Signed-off-by: Steven Dake <8638f3fce5db0278cfbc239bd581dfc00c29ec9d@redhat.com>
#!/usr/bin/python
import eventlet
from eventlet.green import socket
import libssh2
import time
import os
import random
import base64
import uuid
import M2Crypto
from novaclient.v1_1 import client
def instance_start(instance_name, image_name, flavor_name):
"""
Method to start an instance
"""
def _null_callback(p, n, out):
"""
Method to silence the default M2Crypto.RSA.gen_key output.
"""
pass
private_key = M2Crypto.RSA.gen_key(2048, 65537, _null_callback)
# this is the binary public key, in ssh "BN" (BigNumber) MPI format.
# The ssh BN MPI format consists of 4 bytes that describe the length
# of the following data, followed by the data itself in big-endian
# format. The start of the string is 0x0007, which represent the 7
# bytes following that make up 'ssh-rsa'. The key exponent and
# modulus as fetched out of M2Crypto are already in MPI format, so
# we can just use them as-is. We then have to base64 encode the
# result, add a little header information, and then we have a
# full public key.
username = os.environ['OS_USERNAME']
password = os.environ['OS_PASSWORD']
tenant = os.environ['OS_TENANT_NAME']
auth_url = os.environ['OS_AUTH_URL']
nova_client = client.Client(username, password, tenant, auth_url, service_type='compute', service_name='nova')
public_key_bn = '\x00\x00\x00\x07' + 'ssh-rsa' + private_key.e + private_key.n
public_key = 'ssh-rsa %s support@heat-api.org\n' % (base64.b64encode(public_key_bn))
private_key.save_key('/tmp/private_key', cipher=None)
random_uuid = uuid.uuid4()
key_uuid = uuid.uuid3(random_uuid, '%s %s %s' % (instance_name, image_name, flavor_name))
nova_client.keypairs.create(str(key_uuid), public_key)
image_list = nova_client.images.list()
for o in image_list:
if getattr(o, 'name', '') == image_name:
image_id = o.id #getattr(o, 'id', '')
flavor_list = nova_client.flavors.list()
for o in flavor_list:
if getattr(o, 'name', '') == flavor_name:
flavor_id = getattr(o, 'id', '')
nova_client.servers.create(name=instance_name, image=image_id,
flavor=flavor_id, key_name=str(key_uuid))
return private_key
instance_start('instance-F16-test', 'F16-x86_64', "m1.tiny")
|
<commit_before><commit_msg>Add code that shows how to create an instance
This creates an instance with a keypair, and saves the private key to
/tmp/private_key.
Then:
ssh -i /tmp/private_key 10.0.0.x where x is the IP address
Signed-off-by: Steven Dake <8638f3fce5db0278cfbc239bd581dfc00c29ec9d@redhat.com><commit_after>#!/usr/bin/python
import eventlet
from eventlet.green import socket
import libssh2
import time
import os
import random
import base64
import uuid
import M2Crypto
from novaclient.v1_1 import client
def instance_start(instance_name, image_name, flavor_name):
"""
Method to start an instance
"""
def _null_callback(p, n, out):
"""
Method to silence the default M2Crypto.RSA.gen_key output.
"""
pass
private_key = M2Crypto.RSA.gen_key(2048, 65537, _null_callback)
# this is the binary public key, in ssh "BN" (BigNumber) MPI format.
# The ssh BN MPI format consists of 4 bytes that describe the length
# of the following data, followed by the data itself in big-endian
# format. The start of the string is 0x0007, which represent the 7
# bytes following that make up 'ssh-rsa'. The key exponent and
# modulus as fetched out of M2Crypto are already in MPI format, so
# we can just use them as-is. We then have to base64 encode the
# result, add a little header information, and then we have a
# full public key.
username = os.environ['OS_USERNAME']
password = os.environ['OS_PASSWORD']
tenant = os.environ['OS_TENANT_NAME']
auth_url = os.environ['OS_AUTH_URL']
nova_client = client.Client(username, password, tenant, auth_url, service_type='compute', service_name='nova')
public_key_bn = '\x00\x00\x00\x07' + 'ssh-rsa' + private_key.e + private_key.n
public_key = 'ssh-rsa %s support@heat-api.org\n' % (base64.b64encode(public_key_bn))
private_key.save_key('/tmp/private_key', cipher=None)
random_uuid = uuid.uuid4()
key_uuid = uuid.uuid3(random_uuid, '%s %s %s' % (instance_name, image_name, flavor_name))
nova_client.keypairs.create(str(key_uuid), public_key)
image_list = nova_client.images.list()
for o in image_list:
if getattr(o, 'name', '') == image_name:
image_id = o.id #getattr(o, 'id', '')
flavor_list = nova_client.flavors.list()
for o in flavor_list:
if getattr(o, 'name', '') == flavor_name:
flavor_id = getattr(o, 'id', '')
nova_client.servers.create(name=instance_name, image=image_id,
flavor=flavor_id, key_name=str(key_uuid))
return private_key
instance_start('instance-F16-test', 'F16-x86_64', "m1.tiny")
|
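The long comment in instance_start() describes the ssh "BN MPI" layout: each field is a 4-byte big-endian length followed by the raw bytes, which is where the hardcoded '\x00\x00\x00\x07' prefix for the seven bytes of 'ssh-rsa' comes from. A minimal sketch of that framing (illustrative only, written for Python 3 rather than the script's Python 2):

import base64
import struct

def ssh_string(data):
    # 4-byte big-endian length prefix, then the payload itself
    return struct.pack('>I', len(data)) + data

def rsa_public_line(e_mpi, n_mpi, comment='support@heat-api.org'):
    # key.e and key.n from M2Crypto are already MPI-framed, so only the
    # 'ssh-rsa' tag needs a length prefix before base64-encoding the blob.
    blob = ssh_string(b'ssh-rsa') + e_mpi + n_mpi
    return 'ssh-rsa %s %s\n' % (base64.b64encode(blob).decode('ascii'), comment)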
|
2287d84e2a89fd938ae299f69164602880c611df
|
openprescribing/frontend/management/commands/load_development_data.py
|
openprescribing/frontend/management/commands/load_development_data.py
|
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
|
Add command to load data for local development
|
Add command to load data for local development
At present this just loads a bunch of test fixtures (those recommended
by @inglep) but there's obviously scope for doing something else in
future. The main thing is that there's a single command to run which
does all this and we can update what that command actually does at our
leisure.
Thinking more broadly, it might make sense to make it a convention
across all projects that they have such a command.
|
Python
|
mit
|
annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing
|
Add command to load data for local development
At present this just loads a bunch of test fixtures (those recommended
by @inglep) but there's obviously scope for doing something else in
future. The main thing is that there's a single command to run which
does all this and we can update what that command actually does at our
leisure.
Thinking more broadly, it might make sense to make it a convention
across all projects that they have such a command.
|
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
|
<commit_before><commit_msg>Add command to load data for local development
At present this just loads a bunch of test fixtures (those recommended
by @inglep) but there's obviously scope for doing something else in
future. The main thing is that there's a single command to run which
does all this and we can update what that command actually does at our
leisure.
Thinking more broadly, it might make sense to make it a convention
across all projects that they have such a command.<commit_after>
|
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
|
Add command to load data for local development
At present this just loads a bunch of test fixtures (those recommended
by @inglep) but there's obviously scope for doing something else in
future. The main thing is that there's a single command to run which
does all this and we can update what that command actually does at our
leisure.
Thinking more broadly, it might make sense to make it a convention
across all projects that they have such a command.
from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
|
<commit_before><commit_msg>Add command to load data for local development
At present this just loads a bunch of test fixtures (those recommended
by @inglep) but there's obviously scope for doing something else in
future. The main thing is that there's a single command to run which
does all this and we can update what that command actually does at our
leisure.
Thinking more broadly, it might make sense to make it a convention
across all projects that they have such a command.<commit_after>from django.core.management import call_command
from django.core.management.base import BaseCommand
from frontend.tests.test_api_spending import TestAPISpendingViewsPPUTable
class Command(BaseCommand):
help = 'Loads sample data intended for use in local development'
def handle(self, *args, **options):
# For now we just piggyback off the set of test fixtures used by the
# API tests
fixtures = TestAPISpendingViewsPPUTable.fixtures
call_command('loaddata', *fixtures)
|
|
0eb4c06a818ceaac94be9e1992aed7394ebaca30
|
sara_flexbe_states/src/sara_flexbe_states/Wonderland_Entity_Exist.py
|
sara_flexbe_states/src/sara_flexbe_states/Wonderland_Entity_Exist.py
|
#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
import json
class Wonderland_Entity_Exist(EventState):
'''
    Check whether at least one entity exists in a json string
    ># json_text string command to read
    <= ok return when at least one entity exists
    <= empty return when no entity has the selected name
'''
def __init__(self):
# See example_state.py for basic explanations.
super(Wonderland_Entity_Exist, self).__init__(outcomes=['ok', 'empty'], input_keys=['json_text'])
def execute(self, userdata):
# parse parameter json data
data = json.loads(userdata.json_text)
# read if there is data
if not data:
return 'empty'
else:
return 'ok'
|
Add a state to inform if an entity exists.
|
Add a state to inform if an entity exists.
|
Python
|
bsd-3-clause
|
WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors
|
Add a state to inform if an entity exists.
|
#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
import json
class Wonderland_Entity_Exist(EventState):
'''
    Check whether at least one entity exists in a json string
    ># json_text string command to read
    <= ok return when at least one entity exists
    <= empty return when no entity has the selected name
'''
def __init__(self):
# See example_state.py for basic explanations.
super(Wonderland_Entity_Exist, self).__init__(outcomes=['ok', 'empty'], input_keys=['json_text'])
def execute(self, userdata):
# parse parameter json data
data = json.loads(userdata.json_text)
# read if there is data
if not data:
return 'empty'
else:
return 'ok'
|
<commit_before><commit_msg>Add a state to inform if an entity exists.<commit_after>
|
#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
import json
class Wonderland_Entity_Exist(EventState):
'''
    Check whether at least one entity exists in a json string
    ># json_text string command to read
    <= ok return when at least one entity exists
    <= empty return when no entity has the selected name
'''
def __init__(self):
# See example_state.py for basic explanations.
super(Wonderland_Entity_Exist, self).__init__(outcomes=['ok', 'empty'], input_keys=['json_text'])
def execute(self, userdata):
# parse parameter json data
data = json.loads(userdata.json_text)
# read if there is data
if not data:
return 'empty'
else:
return 'ok'
|
Add a state to inform if an entity exists.
#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
import json
class Wonderland_Entity_Exist(EventState):
'''
    Check whether at least one entity exists in a json string
    ># json_text string command to read
    <= ok return when at least one entity exists
    <= empty return when no entity has the selected name
'''
def __init__(self):
# See example_state.py for basic explanations.
super(Wonderland_Entity_Exist, self).__init__(outcomes=['ok', 'empty'], input_keys=['json_text'])
def execute(self, userdata):
# parse parameter json data
data = json.loads(userdata.json_text)
# read if there is data
if not data:
return 'empty'
else:
return 'ok'
|
<commit_before><commit_msg>Add a state to inform if an entity exists.<commit_after>#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
import json
class Wonderland_Entity_Exist(EventState):
'''
    Check whether at least one entity exists in a json string
    ># json_text string command to read
    <= ok return when at least one entity exists
    <= empty return when no entity has the selected name
'''
def __init__(self):
# See example_state.py for basic explanations.
super(Wonderland_Entity_Exist, self).__init__(outcomes=['ok', 'empty'], input_keys=['json_text'])
def execute(self, userdata):
# parse parameter json data
data = json.loads(userdata.json_text)
# read if there is data
if not data:
return 'empty'
else:
return 'ok'
|
|
731e7c5e4595fb2dc5e27b8811ac6d2d61cb8a60
|
language/Python/17-昆明-土豆/practice_buddle_sort.py
|
language/Python/17-昆明-土豆/practice_buddle_sort.py
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import time
L1 = [i for i in range(1000000)]
print("List Length:", len(L1))
t1 = time.time()
for j in range(0, len(L1) - 1):
if L1[j] > L1[j + 1]:
L1[j], L1[j + 1] = L1[j + 1], L1[j]
else:
pass
print('Sorted List:', L1)
t2 = time.time()
print("Time usage:", t2 - t1)
|
Add some files written by PatrickY.
|
Add some files written by PatrickY.
|
Python
|
mit
|
LinFengYnu/2017YNU,LinFengYnu/2017YNU,LinFengYnu/2017YNU,LinFengYnu/2017YNU,LinFengYnu/2017YNU
|
Add some files written by PatrickY.
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import time
L1 = [i for i in range(1000000)]
print("List Length:", len(L1))
t1 = time.time()
for j in range(0, len(L1) - 1):
if L1[j] > L1[j + 1]:
L1[j], L1[j + 1] = L1[j + 1], L1[j]
else:
pass
print('Sorted List:', L1)
t2 = time.time()
print("Time usage:", t2 - t1)
|
<commit_before><commit_msg>Add some files written by PatrickY.<commit_after>
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import time
L1 = [i for i in range(1000000)]
print("List Length:", len(L1))
t1 = time.time()
for j in range(0, len(L1) - 1):
if L1[j] > L1[j + 1]:
L1[j], L1[j + 1] = L1[j + 1], L1[j]
else:
pass
print('Sorted List:', L1)
t2 = time.time()
print("Time usage:", t2 - t1)
|
Add some files written by PatrickY.
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import time
L1 = [i for i in range(1000000)]
print("List Length:", len(L1))
t1 = time.time()
for j in range(0, len(L1) - 1):
if L1[j] > L1[j + 1]:
L1[j], L1[j + 1] = L1[j + 1], L1[j]
else:
pass
print('Sorted List:', L1)
t2 = time.time()
print("Time usage:", t2 - t1)
|
<commit_before><commit_msg>Add some files written by PatrickY.<commit_after>#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import time
L1 = [i for i in range(1000000)]
print("List Length:", len(L1))
t1 = time.time()
for j in range(0, len(L1) - 1):
if L1[j] > L1[j + 1]:
L1[j], L1[j + 1] = L1[j + 1], L1[j]
else:
pass
print('Sorted List:', L1)
t2 = time.time()
print("Time usage:", t2 - t1)
|
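Note the loop above makes only a single left-to-right pass (and over an already-sorted list, so no swaps ever fire). A full bubble sort repeats passes until nothing moves; a sketch on a small shuffled list, since its O(n^2) comparisons are impractical at the script's 10**6 elements:

import random

def bubble_sort(seq):
    for end in range(len(seq) - 1, 0, -1):
        swapped = False
        for j in range(end):
            if seq[j] > seq[j + 1]:
                seq[j], seq[j + 1] = seq[j + 1], seq[j]
                swapped = True
        if not swapped:
            break  # already sorted, stop early
    return seq

print(bubble_sort(random.sample(range(100), 20)))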
|
3558fa2b35e02704c1556081a069b24e07e8d5a6
|
alembic/versions/8f5b2066cbac_add_self_referential_image_reference.py
|
alembic/versions/8f5b2066cbac_add_self_referential_image_reference.py
|
"""Add self-referential image reference
Revision ID: 8f5b2066cbac
Revises: 698cc06661d6
Create Date: 2016-03-20 19:35:31.321144
"""
# revision identifiers, used by Alembic.
revision = '8f5b2066cbac'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('images', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_images_gallery_id'), ['gallery_id'], unique=False)
batch_op.create_index(batch_op.f('ix_images_site_id'), ['site_id'], unique=False)
batch_op.create_foreign_key('ix_images_parent_image_id', 'images', ['parent_image_id'], ['id'])
batch_op.create_foreign_key('ix_sites_site_id', 'sites', ['site_id'], ['id'])
batch_op.create_foreign_key('ix_galleries_gallery_id', 'galleries', ['gallery_id'], ['id'])
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'), ['page_section_id'], unique=False)
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.drop_index('ix_sites_settings_site_id')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.create_index('ix_sites_settings_site_id', ['site_id'], unique=False)
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'))
with op.batch_alter_table('images', schema=None) as batch_op:
        batch_op.drop_constraint('ix_galleries_gallery_id', type_='foreignkey')
        batch_op.drop_constraint('ix_sites_site_id', type_='foreignkey')
        batch_op.drop_constraint('ix_images_parent_image_id', type_='foreignkey')
batch_op.drop_index(batch_op.f('ix_images_site_id'))
batch_op.drop_index(batch_op.f('ix_images_gallery_id'))
### end Alembic commands ###
|
Add DB migration for self-referential image id .. that works on sqlalchemy
|
Add DB migration for self-referential image id .. that works on sqlalchemy
|
Python
|
mit
|
matslindh/kimochi,matslindh/kimochi
|
Add DB migration for self-referential image id .. that works on sqlalchemy
|
"""Add self-referential image reference
Revision ID: 8f5b2066cbac
Revises: 698cc06661d6
Create Date: 2016-03-20 19:35:31.321144
"""
# revision identifiers, used by Alembic.
revision = '8f5b2066cbac'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('images', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_images_gallery_id'), ['gallery_id'], unique=False)
batch_op.create_index(batch_op.f('ix_images_site_id'), ['site_id'], unique=False)
batch_op.create_foreign_key('ix_images_parent_image_id', 'images', ['parent_image_id'], ['id'])
batch_op.create_foreign_key('ix_sites_site_id', 'sites', ['site_id'], ['id'])
batch_op.create_foreign_key('ix_galleries_gallery_id', 'galleries', ['gallery_id'], ['id'])
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'), ['page_section_id'], unique=False)
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.drop_index('ix_sites_settings_site_id')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.create_index('ix_sites_settings_site_id', ['site_id'], unique=False)
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'))
with op.batch_alter_table('images', schema=None) as batch_op:
        batch_op.drop_constraint('ix_galleries_gallery_id', type_='foreignkey')
        batch_op.drop_constraint('ix_sites_site_id', type_='foreignkey')
        batch_op.drop_constraint('ix_images_parent_image_id', type_='foreignkey')
batch_op.drop_index(batch_op.f('ix_images_site_id'))
batch_op.drop_index(batch_op.f('ix_images_gallery_id'))
### end Alembic commands ###
|
<commit_before><commit_msg>Add DB migration for self-referential image id .. that works on sqlalchemy<commit_after>
|
"""Add self-referential image reference
Revision ID: 8f5b2066cbac
Revises: 698cc06661d6
Create Date: 2016-03-20 19:35:31.321144
"""
# revision identifiers, used by Alembic.
revision = '8f5b2066cbac'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('images', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_images_gallery_id'), ['gallery_id'], unique=False)
batch_op.create_index(batch_op.f('ix_images_site_id'), ['site_id'], unique=False)
batch_op.create_foreign_key('ix_images_parent_image_id', 'images', ['parent_image_id'], ['id'])
batch_op.create_foreign_key('ix_sites_site_id', 'sites', ['site_id'], ['id'])
batch_op.create_foreign_key('ix_galleries_gallery_id', 'galleries', ['gallery_id'], ['id'])
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'), ['page_section_id'], unique=False)
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.drop_index('ix_sites_settings_site_id')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.create_index('ix_sites_settings_site_id', ['site_id'], unique=False)
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'))
with op.batch_alter_table('images', schema=None) as batch_op:
        batch_op.drop_constraint('ix_galleries_gallery_id', type_='foreignkey')
        batch_op.drop_constraint('ix_sites_site_id', type_='foreignkey')
        batch_op.drop_constraint('ix_images_parent_image_id', type_='foreignkey')
batch_op.drop_index(batch_op.f('ix_images_site_id'))
batch_op.drop_index(batch_op.f('ix_images_gallery_id'))
### end Alembic commands ###
|
Add DB migration for self-referential image id .. that works on sqlalchemy
"""Add self-referential image reference
Revision ID: 8f5b2066cbac
Revises: 698cc06661d6
Create Date: 2016-03-20 19:35:31.321144
"""
# revision identifiers, used by Alembic.
revision = '8f5b2066cbac'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('images', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_images_gallery_id'), ['gallery_id'], unique=False)
batch_op.create_index(batch_op.f('ix_images_site_id'), ['site_id'], unique=False)
batch_op.create_foreign_key('ix_images_parent_image_id', 'images', ['parent_image_id'], ['id'])
batch_op.create_foreign_key('ix_sites_site_id', 'sites', ['site_id'], ['id'])
batch_op.create_foreign_key('ix_galleries_gallery_id', 'galleries', ['gallery_id'], ['id'])
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'), ['page_section_id'], unique=False)
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.drop_index('ix_sites_settings_site_id')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.create_index('ix_sites_settings_site_id', ['site_id'], unique=False)
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'))
with op.batch_alter_table('images', schema=None) as batch_op:
        batch_op.drop_constraint('ix_galleries_gallery_id', type_='foreignkey')
        batch_op.drop_constraint('ix_sites_site_id', type_='foreignkey')
        batch_op.drop_constraint('ix_images_parent_image_id', type_='foreignkey')
batch_op.drop_index(batch_op.f('ix_images_site_id'))
batch_op.drop_index(batch_op.f('ix_images_gallery_id'))
### end Alembic commands ###
|
<commit_before><commit_msg>Add DB migration for self-referential image id .. that works on sqlalchemy<commit_after>"""Add self-referential image reference
Revision ID: 8f5b2066cbac
Revises: 698cc06661d6
Create Date: 2016-03-20 19:35:31.321144
"""
# revision identifiers, used by Alembic.
revision = '8f5b2066cbac'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('images', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_images_gallery_id'), ['gallery_id'], unique=False)
batch_op.create_index(batch_op.f('ix_images_site_id'), ['site_id'], unique=False)
batch_op.create_foreign_key('ix_images_parent_image_id', 'images', ['parent_image_id'], ['id'])
batch_op.create_foreign_key('ix_sites_site_id', 'sites', ['site_id'], ['id'])
batch_op.create_foreign_key('ix_galleries_gallery_id', 'galleries', ['gallery_id'], ['id'])
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'), ['page_section_id'], unique=False)
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.drop_index('ix_sites_settings_site_id')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('sites_settings', schema=None) as batch_op:
batch_op.create_index('ix_sites_settings_site_id', ['site_id'], unique=False)
with op.batch_alter_table('pages_sections_layout_settings', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_pages_sections_layout_settings_page_section_id'))
with op.batch_alter_table('images', schema=None) as batch_op:
        batch_op.drop_constraint('ix_galleries_gallery_id', type_='foreignkey')
        batch_op.drop_constraint('ix_sites_site_id', type_='foreignkey')
        batch_op.drop_constraint('ix_images_parent_image_id', type_='foreignkey')
batch_op.drop_index(batch_op.f('ix_images_site_id'))
batch_op.drop_index(batch_op.f('ix_images_gallery_id'))
### end Alembic commands ###
|
|
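The downgrade above originally dropped three constraints by the name None, which Alembic's autogenerate emits when it cannot recover a constraint name; the fix reuses the names given in upgrade(). A general SQLAlchemy pattern (not taken from this project) that avoids the problem at the source is to attach a naming convention to the MetaData so every index and foreign key gets a deterministic name:

from sqlalchemy import MetaData

NAMING_CONVENTION = {
    "ix": "ix_%(column_0_label)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
}
metadata = MetaData(naming_convention=NAMING_CONVENTION)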
5a866e5141e42a212b13ec98233cc4ec88ad04bf
|
videos/tests/test_models.py
|
videos/tests/test_models.py
|
from django.test import TestCase
from .. import models
class TestVideo(TestCase):
def test_fields(self):
expected_fields = (
'id',
'title',
'slug',
'preview',
'length',
'recorded',
'created',
# Incoming
'source', # TODO: set a verbose name.
)
fields = models.Video._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
class TestSource(TestCase):
def test_fields(self):
expected_fields = (
'id',
'video',
'file',
'type',
)
fields = models.Source._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
|
Test fields of core models
|
Test fields of core models
|
Python
|
bsd-2-clause
|
incuna/incuna-videos,incuna/incuna-videos
|
Test fields of core models
|
from django.test import TestCase
from .. import models
class TestVideo(TestCase):
def test_fields(self):
expected_fields = (
'id',
'title',
'slug',
'preview',
'length',
'recorded',
'created',
# Incoming
'source', # TODO: set a verbose name.
)
fields = models.Video._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
class TestSource(TestCase):
def test_fields(self):
expected_fields = (
'id',
'video',
'file',
'type',
)
fields = models.Source._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
|
<commit_before><commit_msg>Test fields of core models<commit_after>
|
from django.test import TestCase
from .. import models
class TestVideo(TestCase):
def test_fields(self):
expected_fields = (
'id',
'title',
'slug',
'preview',
'length',
'recorded',
'created',
# Incoming
'source', # TODO: set a verbose name.
)
fields = models.Video._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
class TestSource(TestCase):
def test_fields(self):
expected_fields = (
'id',
'video',
'file',
'type',
)
fields = models.Source._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
|
Test fields of core models
from django.test import TestCase
from .. import models
class TestVideo(TestCase):
def test_fields(self):
expected_fields = (
'id',
'title',
'slug',
'preview',
'length',
'recorded',
'created',
# Incoming
'source', # TODO: set a verbose name.
)
fields = models.Video._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
class TestSource(TestCase):
def test_fields(self):
expected_fields = (
'id',
'video',
'file',
'type',
)
fields = models.Source._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
|
<commit_before><commit_msg>Test fields of core models<commit_after>from django.test import TestCase
from .. import models
class TestVideo(TestCase):
def test_fields(self):
expected_fields = (
'id',
'title',
'slug',
'preview',
'length',
'recorded',
'created',
# Incoming
'source', # TODO: set a verbose name.
)
fields = models.Video._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
class TestSource(TestCase):
def test_fields(self):
expected_fields = (
'id',
'video',
'file',
'type',
)
fields = models.Source._meta.get_all_field_names()
self.assertCountEqual(fields, expected_fields)
|
|
61560b843db83d5542d4924a3e3caf06ed00711f
|
python/ecep/portal/migrations/0009_auto_20151022_1556.py
|
python/ecep/portal/migrations/0009_auto_20151022_1556.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def clear_enrollment_info(apps, schema_editor):
"""
    Clear out the garbage enrollment info from all locations.
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
loc.enrollment_en = None
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0008_auto_20150804_1635'),
]
operations = [
migrations.RunPython(clear_enrollment_info),
]
|
Add migration to perform bulk data operation removing garbage enrollment info
|
Add migration to perform bulk data operation removing garbage enrollment info
|
Python
|
mit
|
smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning
|
Add migration to perform bulk data operation removing garbage enrollment info
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def clear_enrollment_info(apps, schema_editor):
"""
    Clear out the garbage enrollment info from all locations.
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
loc.enrollment_en = None
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0008_auto_20150804_1635'),
]
operations = [
migrations.RunPython(clear_enrollment_info),
]
|
<commit_before><commit_msg>Add migration to perform bulk data operation removing garbage enrollment info<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def clear_enrollment_info(apps, schema_editor):
"""
    Clear out the garbage enrollment info from all locations.
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
loc.enrollment_en = None
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0008_auto_20150804_1635'),
]
operations = [
migrations.RunPython(clear_enrollment_info),
]
|
Add migration to perform bulk data operation removing garbage enrollment info
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def clear_enrollment_info(apps, schema_editor):
"""
    Clear out the garbage enrollment info from all locations.
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
loc.enrollment_en = None
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0008_auto_20150804_1635'),
]
operations = [
migrations.RunPython(clear_enrollment_info),
]
|
<commit_before><commit_msg>Add migration to perform bulk data operation removing garbage enrollment info<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def clear_enrollment_info(apps, schema_editor):
"""
    Clear out the garbage enrollment info from all locations.
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
loc.enrollment_en = None
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0008_auto_20150804_1635'),
]
operations = [
migrations.RunPython(clear_enrollment_info),
]
|
|
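As written, clear_enrollment_info() has no reverse, so the migration cannot be unapplied. A hedged variant of the operations list (the cleared data is genuinely gone; the no-op reverse only lets `migrate` step back past this migration, and RunPython.noop has existed since Django 1.8):

from django.db import migrations

operations = [
    migrations.RunPython(clear_enrollment_info, migrations.RunPython.noop),
]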
f5201902da06f7152e61027a640bc949b0fc5085
|
download_vgg19.py
|
download_vgg19.py
|
"""Downloads the pretrained VGG model."""
import argparse
import os
from fuel.downloaders.base import default_downloader
URL = ('https://s3.amazonaws.com/lasagne/recipes/pretrained/imagenet/'
'vgg19_normalized.pkl')
def main():
default_downloader(os.getcwd(), [URL], [None])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Download the pretrained VGG model")
args = parser.parse_args()
main()
|
Add download script for VGG19
|
Add download script for VGG19
|
Python
|
mit
|
vdumoulin/discgen
|
Add download script for VGG19
|
"""Downloads the pretrained VGG model."""
import argparse
import os
from fuel.downloaders.base import default_downloader
URL = ('https://s3.amazonaws.com/lasagne/recipes/pretrained/imagenet/'
'vgg19_normalized.pkl')
def main():
default_downloader(os.getcwd(), [URL], [None])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Download the pretrained VGG model")
args = parser.parse_args()
main()
|
<commit_before><commit_msg>Add download script for VGG19<commit_after>
|
"""Downloads the pretrained VGG model."""
import argparse
import os
from fuel.downloaders.base import default_downloader
URL = ('https://s3.amazonaws.com/lasagne/recipes/pretrained/imagenet/'
'vgg19_normalized.pkl')
def main():
default_downloader(os.getcwd(), [URL], [None])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Download the pretrained VGG model")
args = parser.parse_args()
main()
|
Add download script for VGG19
"""Downloads the pretrained VGG model."""
import argparse
import os
from fuel.downloaders.base import default_downloader
URL = ('https://s3.amazonaws.com/lasagne/recipes/pretrained/imagenet/'
'vgg19_normalized.pkl')
def main():
default_downloader(os.getcwd(), [URL], [None])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Download the pretrained VGG model")
args = parser.parse_args()
main()
|
<commit_before><commit_msg>Add download script for VGG19<commit_after>"""Downloads the pretrained VGG model."""
import argparse
import os
from fuel.downloaders.base import default_downloader
URL = ('https://s3.amazonaws.com/lasagne/recipes/pretrained/imagenet/'
'vgg19_normalized.pkl')
def main():
default_downloader(os.getcwd(), [URL], [None])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Download the pretrained VGG model")
args = parser.parse_args()
main()
|
|
1218887b10e39672e356fabbe8b9772d41d316c9
|
haystackbrowser/forms.py
|
haystackbrowser/forms.py
|
from django import forms
from haystack.forms import ModelSearchForm
class PreSelectedModelSearchForm(ModelSearchForm):
def __init__(self, *args, **kwargs):
super(PreSelectedModelSearchForm, self).__init__(*args, **kwargs)
self.fields['models'].initial = (choice[0] for choice in self.fields['models'].choices)
|
Create a form subclass with all models pre-ticked.
|
Create a form subclass with all models pre-ticked.
|
Python
|
bsd-2-clause
|
vmarkovtsev/django-haystackbrowser,vmarkovtsev/django-haystackbrowser,jannon/django-haystackbrowser,jannon/django-haystackbrowser,vmarkovtsev/django-haystackbrowser,jannon/django-haystackbrowser
|
Create a form subclass with all models pre-ticked.
|
from django import forms
from haystack.forms import ModelSearchForm
class PreSelectedModelSearchForm(ModelSearchForm):
def __init__(self, *args, **kwargs):
super(PreSelectedModelSearchForm, self).__init__(*args, **kwargs)
self.fields['models'].initial = (choice[0] for choice in self.fields['models'].choices)
|
<commit_before><commit_msg>Create a form subclass with all models pre-ticked.<commit_after>
|
from django import forms
from haystack.forms import ModelSearchForm
class PreSelectedModelSearchForm(ModelSearchForm):
def __init__(self, *args, **kwargs):
super(PreSelectedModelSearchForm, self).__init__(*args, **kwargs)
self.fields['models'].initial = (choice[0] for choice in self.fields['models'].choices)
|
Create a form subclass with all models pre-ticked.
from django import forms
from haystack.forms import ModelSearchForm
class PreSelectedModelSearchForm(ModelSearchForm):
def __init__(self, *args, **kwargs):
super(PreSelectedModelSearchForm, self).__init__(*args, **kwargs)
self.fields['models'].initial = (choice[0] for choice in self.fields['models'].choices)
|
<commit_before><commit_msg>Create a form subclass with all models pre-ticked.<commit_after>from django import forms
from haystack.forms import ModelSearchForm
class PreSelectedModelSearchForm(ModelSearchForm):
def __init__(self, *args, **kwargs):
super(PreSelectedModelSearchForm, self).__init__(*args, **kwargs)
self.fields['models'].initial = (choice[0] for choice in self.fields['models'].choices)
|
|
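For context, one plausible way to wire the pre-ticked form into Haystack's class-based view (the import path of the view instance is an assumption, not part of this commit):

from haystack.views import SearchView
from haystackbrowser.forms import PreSelectedModelSearchForm

search_view = SearchView(form_class=PreSelectedModelSearchForm)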
81e85f8d89a90be7444e39bd2d5dedf49d299922
|
robot_localisation/main.py
|
robot_localisation/main.py
|
"""
This module contains the logic to run the simulation.
"""
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import argparse
from robot_localisation.grid import *
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Robot localisation with HMM')
parser.add_argument(
'-r', '--rows',
type=int,
help='the number of rows on the grid, default is 4',
default=4)
parser.add_argument(
'-c', '--columns',
type=int,
help='the number of columns on the grid, default is 4',
default=4)
args = parser.parse_args()
the_grid = Grid(height=args.rows, width=args.columns) # or something
print(the_grid)
|
Add basic argparse of rows and columns
|
Add basic argparse of rows and columns
|
Python
|
mit
|
AxelTLarsson/robot-localisation
|
Add basic argparse of rows and columns
|
"""
This module contains the logic to run the simulation.
"""
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import argparse
from robot_localisation.grid import *
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Robot localisation with HMM')
parser.add_argument(
'-r', '--rows',
type=int,
help='the number of rows on the grid, default is 4',
default=4)
parser.add_argument(
'-c', '--columns',
type=int,
help='the number of columns on the grid, default is 4',
default=4)
args = parser.parse_args()
the_grid = Grid(height=args.rows, width=args.columns) # or something
print(the_grid)
|
<commit_before><commit_msg>Add basic argparse of rows and columns<commit_after>
|
"""
This module contains the logic to run the simulation.
"""
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import argparse
from robot_localisation.grid import *
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Robot localisation with HMM')
parser.add_argument(
'-r', '--rows',
type=int,
help='the number of rows on the grid, default is 4',
default=4)
parser.add_argument(
'-c', '--columns',
type=int,
help='the number of columns on the grid, default is 4',
default=4)
args = parser.parse_args()
the_grid = Grid(height=args.rows, width=args.columns) # or something
print(the_grid)
|
Add basic argparse of rows and columns
"""
This module contains the logic to run the simulation.
"""
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import argparse
from robot_localisation.grid import *
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Robot localisation with HMM')
parser.add_argument(
'-r', '--rows',
type=int,
help='the number of rows on the grid, default is 4',
default=4)
parser.add_argument(
'-c', '--columns',
type=int,
help='the number of columns on the grid, default is 4',
default=4)
args = parser.parse_args()
the_grid = Grid(height=args.rows, width=args.columns) # or something
print(the_grid)
|
<commit_before><commit_msg>Add basic argparse of rows and columns<commit_after>"""
This module contains the logic to run the simulation.
"""
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import argparse
from robot_localisation.grid import *
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Robot localisation with HMM')
parser.add_argument(
'-r', '--rows',
type=int,
help='the number of rows on the grid, default is 4',
default=4)
parser.add_argument(
'-c', '--columns',
type=int,
help='the number of columns on the grid, default is 4',
default=4)
args = parser.parse_args()
the_grid = Grid(height=args.rows, width=args.columns) # or something
print(the_grid)
|
|
1557372d65872c0cfdb206ae784dd425fe503ad2
|
coffer/coffer.py
|
coffer/coffer.py
|
"""
Secure storage container for treasures
"""
class Coffer(object):
"""
Secure storage container for treasures
Treasures must be removed to be modified and re-added to be
stored again.
"""
def __init__(self, path, *args, **kwargs):
"""
Creates a new coffer
"""
self.__path = path
def add(self, outer_path, inner_path, *args, **kwargs):
"""
Adds a treasure to the coffer
"""
pass
def remove(self, inner_path, outer_path, *args, **kwargs):
"""
Removes a treasure from the coffer
"""
return None
    def get_treasures(self, *args, **kwargs):
"""
Get an iterable of paths to each treasure inside the coffer
"""
return iter(())
@property
def path(self, *args, **kwargs):
return self.__path
|
Add skeleton code for Coffer class
|
Add skeleton code for Coffer class
|
Python
|
mit
|
robobrobro/coffer,robobrobro/coffer
|
Add skeleton code for Coffer class
|
"""
Secure storage container for treasures
"""
class Coffer(object):
"""
Secure storage container for treasures
Treasures must be removed to be modified and re-added to be
stored again.
"""
def __init__(self, path, *args, **kwargs):
"""
Creates a new coffer
"""
self.__path = path
def add(self, outer_path, inner_path, *args, **kwargs):
"""
Adds a treasure to the coffer
"""
pass
def remove(self, inner_path, outer_path, *args, **kwargs):
"""
Removes a treasure from the coffer
"""
return None
    def get_treasures(self, *args, **kwargs):
"""
Get an iterable of paths to each treasure inside the coffer
"""
return iter(())
@property
def path(self, *args, **kwargs):
return self.__path
|
<commit_before><commit_msg>Add skeleton code for Coffer class<commit_after>
|
"""
Secure storage container for treasures
"""
class Coffer(object):
"""
Secure storage container for treasures
Treasures must be removed to be modified and re-added to be
stored again.
"""
def __init__(self, path, *args, **kwargs):
"""
Creates a new coffer
"""
self.__path = path
def add(self, outer_path, inner_path, *args, **kwargs):
"""
Adds a treasure to the coffer
"""
pass
def remove(self, inner_path, outer_path, *args, **kwargs):
"""
Removes a treasure from the coffer
"""
return None
    def get_treasures(self, *args, **kwargs):
"""
Get an iterable of paths to each treasure inside the coffer
"""
return iter(())
@property
def path(self, *args, **kwargs):
return self.__path
|
Add skeleton code for Coffer class"""
Secure storage container for treasures
"""
class Coffer(object):
"""
Secure storage container for treasures
Treasures must be removed to be modified and re-added to be
stored again.
"""
def __init__(self, path, *args, **kwargs):
"""
Creates a new coffer
"""
self.__path = path
def add(self, outer_path, inner_path, *args, **kwargs):
"""
Adds a treasure to the coffer
"""
pass
def remove(self, inner_path, outer_path, *args, **kwargs):
"""
Removes a treasure from the coffer
"""
return None
    def get_treasures(self, *args, **kwargs):
"""
Get an iterable of paths to each treasure inside the coffer
"""
return iter(())
@property
def path(self, *args, **kwargs):
return self.__path
|
<commit_before><commit_msg>Add skeleton code for Coffer class<commit_after>"""
Secure storage container for treasures
"""
class Coffer(object):
"""
Secure storage container for treasures
Treasures must be removed to be modified and re-added to be
stored again.
"""
def __init__(self, path, *args, **kwargs):
"""
Creates a new coffer
"""
self.__path = path
def add(self, outer_path, inner_path, *args, **kwargs):
"""
Adds a treasure to the coffer
"""
pass
def remove(self, inner_path, outer_path, *args, **kwargs):
"""
Removes a treasure from the coffer
"""
return None
    def get_treasures(self, *args, **kwargs):
"""
Get an iterable of paths to each treasure inside the coffer
"""
return iter(())
@property
def path(self, *args, **kwargs):
return self.__path
|
|
c4abca82c9e1a11ee92878c99e30853116f07c4c
|
changelogs/custom/pypi/synapse.py
|
changelogs/custom/pypi/synapse.py
|
def get_urls(releases, **kwargs):
    # PyPI has an old bugtracker_url which points to a separate repo, which causes invalid
    # changelogs to be generated by this tool.
ret = {'https://raw.githubusercontent.com/vertexproject/synapse/master/CHANGELOG.md'}
return ret, set()
|
Add a custom get_urls for the Synapse package.
|
Add a custom get_urls for the Synapse package.
|
Python
|
mit
|
pyupio/changelogs
|
Add a custom get_urls for the Synapse package.
|
def get_urls(releases, **kwargs):
    # PyPI has an old bugtracker_url which points to a separate repo, which causes invalid
    # changelogs to be generated by this tool.
ret = {'https://raw.githubusercontent.com/vertexproject/synapse/master/CHANGELOG.md'}
return ret, set()
|
<commit_before><commit_msg>Add a custom get_urls for the Synapse package.<commit_after>
|
def get_urls(releases, **kwargs):
    # PyPI has an old bugtracker_url which points to a separate repo, which causes invalid
    # changelogs to be generated by this tool.
ret = {'https://raw.githubusercontent.com/vertexproject/synapse/master/CHANGELOG.md'}
return ret, set()
|
Add a custom get_urls for the Synapse package.
def get_urls(releases, **kwargs):
    # PyPI has an old bugtracker_url which points to a separate repo, which causes invalid
    # changelogs to be generated by this tool.
ret = {'https://raw.githubusercontent.com/vertexproject/synapse/master/CHANGELOG.md'}
return ret, set()
|
<commit_before><commit_msg>Add a custom get_urls for the Synapse package.<commit_after>
def get_urls(releases, **kwargs):
    # PyPI has an old bugtracker_url which points to a separate repo, which causes invalid
    # changelogs to be generated by this tool.
ret = {'https://raw.githubusercontent.com/vertexproject/synapse/master/CHANGELOG.md'}
return ret, set()
|
|
2380e5074426f1c0006eaef1c08c9f213c834458
|
lab/11/template_11_c.py
|
lab/11/template_11_c.py
|
from tkinter import *
class Kalkulator():
def __init__(self, master):
self.master = master
        # TODO: Set the window title to "Kalkulator Sederhana" below.
        # TODO: Create the Label, Entry, Button,
        # and any other components that are needed.
def tambah(self):
        # TODO: Implement the addition function, remove "pass"
pass
def kurang(self):
        # TODO: Implement the subtraction function, remove "pass"
pass
def kali(self):
        # TODO: Implement the multiplication function, remove "pass"
pass
def bagi(self):
        # TODO: Implement the division function, remove "pass"
pass
root = Tk()
gui = Kalkulator(root)
root.mainloop()
|
Add lab 11 template for class C
|
Add lab 11 template for class C
|
Python
|
mit
|
giovanism/TarungLab,laymonage/TarungLab
|
Add lab 11 template for class C
|
from tkinter import *
class Kalkulator():
def __init__(self, master):
self.master = master
        # TODO: Set the window title to "Kalkulator Sederhana" below.
        # TODO: Create the Label, Entry, Button,
        # and any other components that are needed.
def tambah(self):
        # TODO: Implement the addition function, remove "pass"
pass
def kurang(self):
        # TODO: Implement the subtraction function, remove "pass"
pass
def kali(self):
        # TODO: Implement the multiplication function, remove "pass"
pass
def bagi(self):
        # TODO: Implement the division function, remove "pass"
pass
root = Tk()
gui = Kalkulator(root)
root.mainloop()
|
<commit_before><commit_msg>Add lab 11 template for class C<commit_after>
|
from tkinter import *
class Kalkulator():
def __init__(self, master):
self.master = master
        # TODO: Set the window title to "Kalkulator Sederhana" below.
        # TODO: Create the Label, Entry, Button,
        # and any other components that are needed.
def tambah(self):
        # TODO: Implement the addition function, remove "pass"
pass
def kurang(self):
        # TODO: Implement the subtraction function, remove "pass"
pass
def kali(self):
        # TODO: Implement the multiplication function, remove "pass"
pass
def bagi(self):
        # TODO: Implement the division function, remove "pass"
pass
root = Tk()
gui = Kalkulator(root)
root.mainloop()
|
Add lab 11 template for class Cfrom tkinter import *
class Kalkulator():
def __init__(self, master):
self.master = master
        # TODO: Set the window title to "Kalkulator Sederhana" below.
        # TODO: Create the Label, Entry, Button,
        # and any other components that are needed.
def tambah(self):
        # TODO: Implement the addition function, remove "pass"
pass
def kurang(self):
        # TODO: Implement the subtraction function, remove "pass"
pass
def kali(self):
        # TODO: Implement the multiplication function, remove "pass"
pass
def bagi(self):
        # TODO: Implement the division function, remove "pass"
pass
root = Tk()
gui = Kalkulator(root)
root.mainloop()
|
<commit_before><commit_msg>Add lab 11 template for class C<commit_after>from tkinter import *
class Kalkulator():
def __init__(self, master):
self.master = master
        # TODO: Set the window title to "Kalkulator Sederhana" below.
        # TODO: Create the Label, Entry, Button,
        # and any other components that are needed.
def tambah(self):
        # TODO: Implement the addition function, remove "pass"
pass
def kurang(self):
        # TODO: Implement the subtraction function, remove "pass"
pass
def kali(self):
        # TODO: Implement the multiplication function, remove "pass"
pass
def bagi(self):
        # TODO: Implement the division function, remove "pass"
pass
root = Tk()
gui = Kalkulator(root)
root.mainloop()
|
|
85e855e19d05d4e7c273abc50dfce40c3277182b
|
hydrachain/tests/test_sim_joins.py
|
hydrachain/tests/test_sim_joins.py
|
import pytest
from hydrachain.consensus.simulation import Network, assert_heightdistance
# run this test with `tox -- -rx -k test_late_joins`
@pytest.mark.xfail
@pytest.mark.parametrize('validators', range(3, 10))
@pytest.mark.parametrize('late', range(1, 3))
@pytest.mark.parametrize('delay', [2])
def test_late_joins(validators, late, delay):
network = Network(num_nodes=validators, simenv=True)
for node in network.nodes[validators - late:]:
node.isactive = False
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
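    # Run the simulation with only the initially active nodes before the late joiners come online.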
network.run(delay * (validators - late))
for node in network.nodes[validators - late:]:
node.isactive = True
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
network.run(max(10, validators * 2))
r = network.check_consistency()
assert_heightdistance(r)
assert r['heights'][10] > 0
|
Add test for late-join scenario
|
Add test for late-join scenario
This adds a parametrized test for variations of
num_validators, late-joiners, late-join-delays.
Currently it fails for
- num_validators > 3 && late-joiners = 1 && delay = 2
- num_validators [7, 9] && late-joiners = 2 && delay = 2
|
Python
|
mit
|
HydraChain/hydrachain,wangkangda/hydrachain,HydraChain/hydrachain,wangkangda/hydrachain
|
Add test for late-join scenario
This adds a parametrized test for variations of
num_validators, late-joiners, late-join-delays.
Currently it fails for
- num_validators > 3 && late-joiners = 1 && delay = 2
- num_validators [7, 9] && late-joiners = 2 && delay = 2
|
import pytest
from hydrachain.consensus.simulation import Network, assert_heightdistance
# run this test with `tox -- -rx -k test_late_joins`
@pytest.mark.xfail
@pytest.mark.parametrize('validators', range(3, 10))
@pytest.mark.parametrize('late', range(1, 3))
@pytest.mark.parametrize('delay', [2])
def test_late_joins(validators, late, delay):
network = Network(num_nodes=validators, simenv=True)
for node in network.nodes[validators - late:]:
node.isactive = False
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
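    # Run the simulation with only the initially active nodes before the late joiners come online.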
network.run(delay * (validators - late))
for node in network.nodes[validators - late:]:
node.isactive = True
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
network.run(max(10, validators * 2))
r = network.check_consistency()
assert_heightdistance(r)
assert r['heights'][10] > 0
|
<commit_before><commit_msg>Add test for late-join scenario
This adds a parametrized test for variations of
num_validators, late-joiners, late-join-delays.
Currently it fails for
- num_validators > 3 && late-joiners = 1 && delay = 2
- num_validators [7, 9] && late-joiners = 2 && delay = 2<commit_after>
|
import pytest
from hydrachain.consensus.simulation import Network, assert_heightdistance
# run this test with `tox -- -rx -k test_late_joins`
@pytest.mark.xfail
@pytest.mark.parametrize('validators', range(3, 10))
@pytest.mark.parametrize('late', range(1, 3))
@pytest.mark.parametrize('delay', [2])
def test_late_joins(validators, late, delay):
network = Network(num_nodes=validators, simenv=True)
for node in network.nodes[validators - late:]:
node.isactive = False
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
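    # Run the simulation with only the initially active nodes before the late joiners come online.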
network.run(delay * (validators - late))
for node in network.nodes[validators - late:]:
node.isactive = True
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
network.run(max(10, validators * 2))
r = network.check_consistency()
assert_heightdistance(r)
assert r['heights'][10] > 0
|
Add test for late-join scenario
This adds a parametrized test for variations of
num_validators, late-joiners, late-join-delays.
Currently it fails for
- num_validators > 3 && late-joiners = 1 && delay = 2
- num_validators [7, 9] && late-joiners = 2 && delay = 2import pytest
from hydrachain.consensus.simulation import Network, assert_heightdistance
# run this test with `tox -- -rx -k test_late_joins`
@pytest.mark.xfail
@pytest.mark.parametrize('validators', range(3, 10))
@pytest.mark.parametrize('late', range(1, 3))
@pytest.mark.parametrize('delay', [2])
def test_late_joins(validators, late, delay):
network = Network(num_nodes=validators, simenv=True)
for node in network.nodes[validators - late:]:
node.isactive = False
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
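    # Run the simulation with only the initially active nodes before the late joiners come online.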
network.run(delay * (validators - late))
for node in network.nodes[validators - late:]:
node.isactive = True
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
network.run(max(10, validators * 2))
r = network.check_consistency()
assert_heightdistance(r)
assert r['heights'][10] > 0
|
<commit_before><commit_msg>Add test for late-join scenario
This adds a parametrized test for variations of
num_validators, late-joiners, late-join-delays.
Currently it fails for
- num_validators > 3 && late-joiners = 1 && delay = 2
- num_validators [7, 9] && late-joiners = 2 && delay = 2<commit_after>import pytest
from hydrachain.consensus.simulation import Network, assert_heightdistance
# run this test with `tox -- -rx -k test_late_joins`
@pytest.mark.xfail
@pytest.mark.parametrize('validators', range(3, 10))
@pytest.mark.parametrize('late', range(1, 3))
@pytest.mark.parametrize('delay', [2])
def test_late_joins(validators, late, delay):
network = Network(num_nodes=validators, simenv=True)
for node in network.nodes[validators - late:]:
node.isactive = False
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
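    # Run the simulation with only the initially active nodes before the late joiners come online.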
network.run(delay * (validators - late))
for node in network.nodes[validators - late:]:
node.isactive = True
network.connect_nodes()
network.normvariate_base_latencies()
network.start()
network.run(max(10, validators * 2))
r = network.check_consistency()
assert_heightdistance(r)
assert r['heights'][10] > 0
|
|
79313c3ec73591498170de911ea58bc33cf6cf05
|
collection_pipelines/std.py
|
collection_pipelines/std.py
|
"""Standard pipeline processors."""
from collection_pipelines.core import CollectionPipelineOutput
class value(CollectionPipelineOutput):
"""Output processor that returns pipeline items."""
def __init__(self):
self.retval = []
def process(self, item):
"""Appends the item to results list."""
self.retval.append(item)
def return_value(self):
"""
Returns:
[any]: pipeline items.
any: if only one item went through the pipeline.
"""
if len(self.retval) == 1:
return self.retval[0]
return self.retval
|
Add output processor that returns the collected items
|
Add output processor that returns the collected items
|
Python
|
mit
|
povilasb/pycollection-pipelines
|
Add output processor that returns the collected items
|
"""Standard pipeline processors."""
from collection_pipelines.core import CollectionPipelineOutput
class value(CollectionPipelineOutput):
"""Output processor that returns pipeline items."""
def __init__(self):
self.retval = []
def process(self, item):
"""Appends the item to results list."""
self.retval.append(item)
def return_value(self):
"""
Returns:
[any]: pipeline items.
any: if only one item went through the pipeline.
"""
if len(self.retval) == 1:
return self.retval[0]
return self.retval
|
<commit_before><commit_msg>Add output processor that returns the collected items<commit_after>
|
"""Standard pipeline processors."""
from collection_pipelines.core import CollectionPipelineOutput
class value(CollectionPipelineOutput):
"""Output processor that returns pipeline items."""
def __init__(self):
self.retval = []
def process(self, item):
"""Appends the item to results list."""
self.retval.append(item)
def return_value(self):
"""
Returns:
[any]: pipeline items.
any: if only one item went through the pipeline.
"""
if len(self.retval) == 1:
return self.retval[0]
return self.retval
|
Add output processor that returns the collected items"""Standard pipeline processors."""
from collection_pipelines.core import CollectionPipelineOutput
class value(CollectionPipelineOutput):
"""Output processor that returns pipeline items."""
def __init__(self):
self.retval = []
def process(self, item):
"""Appends the item to results list."""
self.retval.append(item)
def return_value(self):
"""
Returns:
[any]: pipeline items.
any: if only one item went through the pipeline.
"""
if len(self.retval) == 1:
return self.retval[0]
return self.retval
|
<commit_before><commit_msg>Add output processor that returns the collected items<commit_after>"""Standard pipeline processors."""
from collection_pipelines.core import CollectionPipelineOutput
class value(CollectionPipelineOutput):
"""Output processor that returns pipeline items."""
def __init__(self):
self.retval = []
def process(self, item):
"""Appends the item to results list."""
self.retval.append(item)
def return_value(self):
"""
Returns:
[any]: pipeline items.
any: if only one item went through the pipeline.
"""
if len(self.retval) == 1:
return self.retval[0]
return self.retval
|
|
0df317159243b29d714e8b28c55439bf6af49fd3
|
scripts/python/framework.py
|
scripts/python/framework.py
|
#
# IAS Basic device framework.
#
# Author: Joeri Hermans
#
import sys
import socket
import struct
# Global members, which are required for the communication
# with the remote IAS controller.
gDeviceIdentifier = sys.argv[1]
gControllerAddress = sys.argv[2]
gControllerPort = int(sys.argv[3])
gSocket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
gSocket.connect((gControllerAddress,gControllerPort))
gRunning = True
def updateState( stateIdentifier , newValue ):
global gSocket
stateIdentifierLength = len(stateIdentifier)
newValueLength = len(newValue)
data = struct.pack("!BBB",0x01,stateIdentifierLength,newValueLength);
data += str.encode(stateIdentifier)
data += str.encode(newValue)
gSocket.sendall(data)
def authenticate():
global gDeviceIdentifier
global gSocket;
identifierLength = len(gDeviceIdentifier)
message = struct.pack("!BB",0x00,identifierLength) + bytes(gDeviceIdentifier.encode("ascii"));
gSocket.sendall(message);
def processFeature(featureIdentifier,parameter):
# TODO Implement
print("Executing " + featureIdentifier + " with " + parameter)
def processCommand():
global gSocket
global gRunning
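    # Read the 3-byte message header: opcode, feature identifier length, parameter length.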
data = gSocket.recv(3);
data = struct.unpack("!BBB",data)
if( data[0] != 0x01 ):
gRunning = False
return
featureIdentifierLength = data[1]
parameterLength = data[2]
featureIdentifier = gSocket.recv(featureIdentifierLength)
featureIdentifier = featureIdentifier.decode("ascii")
if( parameterLength > 0 ):
parameter = gSocket.recv(parameterLength)
else:
parameter = ""
parameter = parameter.decode("ascii")
processFeature(featureIdentifier,parameter)
def processCommands():
global gRunning
while( gRunning ):
try:
processCommand()
except:
gRunning = False
def main():
authenticate()
processCommands()
if( __name__ == "__main__" ):
main()
|
Add basic Python script for devices.
|
Add basic Python script for devices.
|
Python
|
apache-2.0
|
JoeriHermans/Intelligent-Automation-System,JoeriHermans/Intelligent-Automation-System,JoeriHermans/Intelligent-Automation-System,JoeriHermans/Intelligent-Automation-System
|
Add basic Python script for devices.
|
#
# IAS Basic device framework.
#
# Author: Joeri Hermans
#
import sys
import socket
import struct
# Global members, which are required for the communication
# with the remote IAS controller.
gDeviceIdentifier = sys.argv[1]
gControllerAddress = sys.argv[2]
gControllerPort = int(sys.argv[3])
gSocket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
gSocket.connect((gControllerAddress,gControllerPort))
gRunning = True
def updateState( stateIdentifier , newValue ):
global gSocket
stateIdentifierLength = len(stateIdentifier)
newValueLength = len(newValue)
data = struct.pack("!BBB",0x01,stateIdentifierLength,newValueLength);
data += str.encode(stateIdentifier)
data += str.encode(newValue)
gSocket.sendall(data)
def authenticate():
global gDeviceIdentifier
global gSocket;
identifierLength = len(gDeviceIdentifier)
message = struct.pack("!BB",0x00,identifierLength) + bytes(gDeviceIdentifier.encode("ascii"));
gSocket.sendall(message);
def processFeature(featureIdentifier,parameter):
# TODO Implement
print("Executing " + featureIdentifier + " with " + parameter)
def processCommand():
global gSocket
global gRunning
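    # Read the 3-byte message header: opcode, feature identifier length, parameter length.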
data = gSocket.recv(3);
data = struct.unpack("!BBB",data)
if( data[0] != 0x01 ):
gRunning = False
return
featureIdentifierLength = data[1]
parameterLength = data[2]
featureIdentifier = gSocket.recv(featureIdentifierLength)
featureIdentifier = featureIdentifier.decode("ascii")
if( parameterLength > 0 ):
parameter = gSocket.recv(parameterLength)
else:
parameter = ""
parameter = parameter.decode("ascii")
processFeature(featureIdentifier,parameter)
def processCommands():
global gRunning
while( gRunning ):
try:
processCommand()
except:
gRunning = False
def main():
authenticate()
processCommands()
if( __name__ == "__main__" ):
main()
|
<commit_before><commit_msg>Add basic Python script for devices.<commit_after>
|
#
# IAS Basic device framework.
#
# Author: Joeri Hermans
#
import sys
import socket
import struct
# Global members, which are required for the communication
# with the remote IAS controller.
gDeviceIdentifier = sys.argv[1]
gControllerAddress = sys.argv[2]
gControllerPort = int(sys.argv[3])
gSocket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
gSocket.connect((gControllerAddress,gControllerPort))
gRunning = True
def updateState( stateIdentifier , newValue ):
global gSocket
stateIdentifierLength = len(stateIdentifier)
newValueLength = len(newValue)
data = struct.pack("!BBB",0x01,stateIdentifierLength,newValueLength);
data += str.encode(stateIdentifier)
data += str.encode(newValue)
gSocket.sendall(data)
def authenticate():
global gDeviceIdentifier
global gSocket;
identifierLength = len(gDeviceIdentifier)
message = struct.pack("!BB",0x00,identifierLength) + bytes(gDeviceIdentifier.encode("ascii"));
gSocket.sendall(message);
def processFeature(featureIdentifier,parameter):
# TODO Implement
print("Executing " + featureIdentifier + " with " + parameter)
def processCommand():
global gSocket
global gRunning
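    # Read the 3-byte message header: opcode, feature identifier length, parameter length.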
data = gSocket.recv(3);
data = struct.unpack("!BBB",data)
if( data[0] != 0x01 ):
gRunning = False
return
featureIdentifierLength = data[1]
parameterLength = data[2]
featureIdentifier = gSocket.recv(featureIdentifierLength)
featureIdentifier = featureIdentifier.decode("ascii")
if( parameterLength > 0 ):
parameter = gSocket.recv(parameterLength)
else:
parameter = ""
parameter = parameter.decode("ascii")
processFeature(featureIdentifier,parameter)
def processCommands():
global gRunning
while( gRunning ):
try:
processCommand()
except:
gRunning = False
def main():
authenticate()
processCommands()
if( __name__ == "__main__" ):
main()
|
Add basic Python script for devices.#
# IAS Basic device framework.
#
# Author: Joeri Hermans
#
import sys
import socket
import struct
# Global members, which are required for the communication
# with the remote IAS controller.
gDeviceIdentifier = sys.argv[1]
gControllerAddress = sys.argv[2]
gControllerPort = int(sys.argv[3])
gSocket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
gSocket.connect((gControllerAddress,gControllerPort))
gRunning = True
def updateState( stateIdentifier , newValue ):
global gSocket
stateIdentifierLength = len(stateIdentifier)
newValueLength = len(newValue)
data = struct.pack("!BBB",0x01,stateIdentifierLength,newValueLength);
data += str.encode(stateIdentifier)
data += str.encode(newValue)
gSocket.sendall(data)
def authenticate():
global gDeviceIdentifier
global gSocket;
identifierLength = len(gDeviceIdentifier)
message = struct.pack("!BB",0x00,identifierLength) + bytes(gDeviceIdentifier.encode("ascii"));
gSocket.sendall(message);
def processFeature(featureIdentifier,parameter):
# TODO Implement
print("Executing " + featureIdentifier + " with " + parameter)
def processCommand():
global gSocket
global gRunning
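    # Read the 3-byte message header: opcode, feature identifier length, parameter length.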
data = gSocket.recv(3);
data = struct.unpack("!BBB",data)
if( data[0] != 0x01 ):
gRunning = False
return
featureIdentifierLength = data[1]
parameterLength = data[2]
featureIdentifier = gSocket.recv(featureIdentifierLength)
featureIdentifier = featureIdentifier.decode("ascii")
if( parameterLength > 0 ):
parameter = gSocket.recv(parameterLength)
else:
parameter = ""
parameter = parameter.decode("ascii")
processFeature(featureIdentifier,parameter)
def processCommands():
global gRunning
while( gRunning ):
try:
processCommand()
except:
gRunning = False
def main():
authenticate()
processCommands()
if( __name__ == "__main__" ):
main()
|
<commit_before><commit_msg>Add basic Python script for devices.<commit_after>#
# IAS Basic device framework.
#
# Author: Joeri Hermans
#
import sys
import socket
import struct
# Global members, which are required for the communication
# with the remote IAS controller.
gDeviceIdentifier = sys.argv[1]
gControllerAddress = sys.argv[2]
gControllerPort = int(sys.argv[3])
gSocket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
gSocket.connect((gControllerAddress,gControllerPort))
gRunning = True
def updateState( stateIdentifier , newValue ):
global gSocket
stateIdentifierLength = len(stateIdentifier)
newValueLength = len(newValue)
data = struct.pack("!BBB",0x01,stateIdentifierLength,newValueLength);
data += str.encode(stateIdentifier)
data += str.encode(newValue)
gSocket.sendall(data)
def authenticate():
global gDeviceIdentifier
global gSocket;
identifierLength = len(gDeviceIdentifier)
message = struct.pack("!BB",0x00,identifierLength) + bytes(gDeviceIdentifier.encode("ascii"));
gSocket.sendall(message);
def processFeature(featureIdentifier,parameter):
# TODO Implement
print("Executing " + featureIdentifier + " with " + parameter)
def processCommand():
global gSocket
global gRunning
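    # Read the 3-byte message header: opcode, feature identifier length, parameter length.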
data = gSocket.recv(3);
data = struct.unpack("!BBB",data)
if( data[0] != 0x01 ):
gRunning = False
return
featureIdentifierLength = data[1]
parameterLength = data[2]
featureIdentifier = gSocket.recv(featureIdentifierLength)
featureIdentifier = featureIdentifier.decode("ascii")
if( parameterLength > 0 ):
parameter = gSocket.recv(parameterLength)
else:
parameter = ""
parameter = parameter.decode("ascii")
processFeature(featureIdentifier,parameter)
def processCommands():
global gRunning
while( gRunning ):
try:
processCommand()
except:
gRunning = False
def main():
authenticate()
processCommands()
if( __name__ == "__main__" ):
main()
|
|
58c49557574bcbb66c824bbf743fdfbb354e7a03
|
h2o-py/tests/testdir_algos/naivebayes/pyunit_irisNB_cv.py
|
h2o-py/tests/testdir_algos/naivebayes/pyunit_irisNB_cv.py
|
from __future__ import print_function
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.naive_bayes import H2ONaiveBayesEstimator
def nb_iris():
print("Importing iris_wheader.csv data...\n")
iris = h2o.upload_file(pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
iris.describe()
iris_nbayes = H2ONaiveBayesEstimator()
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris, seed=1234)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, seed=1234)
iris_nbayes.show()
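    # "Modulo" fold assignment is deterministic (round-robin), so no seed is needed here.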
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3,fold_assignment="Modulo")
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris)
iris_nbayes.show()
print("And here it is:")
print(iris_nbayes.cross_validation_metrics_summary())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame()['mean'])
if __name__ == "__main__":
pyunit_utils.standalone_test(nb_iris)
else:
nb_iris()
|
Add PyUnit for Naive Bayes (cross)validation.
|
PUBDEV-2769: Add PyUnit for Naive Bayes (cross)validation.
|
Python
|
apache-2.0
|
mathemage/h2o-3,h2oai/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,mathemage/h2o-3,spennihana/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,spennihana/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,YzPaul3/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,h2oai/h2o-3,jangorecki/h2o-3
|
PUBDEV-2769: Add PyUnit for Naive Bayes (cross)validation.
|
from __future__ import print_function
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.naive_bayes import H2ONaiveBayesEstimator
def nb_iris():
print("Importing iris_wheader.csv data...\n")
iris = h2o.upload_file(pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
iris.describe()
iris_nbayes = H2ONaiveBayesEstimator()
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris, seed=1234)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, seed=1234)
iris_nbayes.show()
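    # "Modulo" fold assignment is deterministic (round-robin), so no seed is needed here.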
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3,fold_assignment="Modulo")
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris)
iris_nbayes.show()
print("And here it is:")
print(iris_nbayes.cross_validation_metrics_summary())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame()['mean'])
if __name__ == "__main__":
pyunit_utils.standalone_test(nb_iris)
else:
nb_iris()
|
<commit_before><commit_msg>PUBDEV-2769: Add PyUnit for Naive Bayes (cross)validation.<commit_after>
|
from __future__ import print_function
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.naive_bayes import H2ONaiveBayesEstimator
def nb_iris():
print("Importing iris_wheader.csv data...\n")
iris = h2o.upload_file(pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
iris.describe()
iris_nbayes = H2ONaiveBayesEstimator()
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris, seed=1234)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, seed=1234)
iris_nbayes.show()
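    # "Modulo" fold assignment is deterministic (round-robin), so no seed is needed here.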
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3,fold_assignment="Modulo")
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris)
iris_nbayes.show()
print("And here it is:")
print(iris_nbayes.cross_validation_metrics_summary())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame()['mean'])
if __name__ == "__main__":
pyunit_utils.standalone_test(nb_iris)
else:
nb_iris()
|
PUBDEV-2769: Add PyUnit for Naive Bayes (cross)validation.from __future__ import print_function
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.naive_bayes import H2ONaiveBayesEstimator
def nb_iris():
print("Importing iris_wheader.csv data...\n")
iris = h2o.upload_file(pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
iris.describe()
iris_nbayes = H2ONaiveBayesEstimator()
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris, seed=1234)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, seed=1234)
iris_nbayes.show()
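    # "Modulo" fold assignment is deterministic (round-robin), so no seed is needed here.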
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3,fold_assignment="Modulo")
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris)
iris_nbayes.show()
print("And here it is:")
print(iris_nbayes.cross_validation_metrics_summary())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame()['mean'])
if __name__ == "__main__":
pyunit_utils.standalone_test(nb_iris)
else:
nb_iris()
|
<commit_before><commit_msg>PUBDEV-2769: Add PyUnit for Naive Bayes (cross)validation.<commit_after>from __future__ import print_function
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.naive_bayes import H2ONaiveBayesEstimator
def nb_iris():
print("Importing iris_wheader.csv data...\n")
iris = h2o.upload_file(pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
iris.describe()
iris_nbayes = H2ONaiveBayesEstimator()
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, validation_frame=iris, seed=1234)
iris_nbayes.show()
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3)
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris, seed=1234)
iris_nbayes.show()
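    # "Modulo" fold assignment is deterministic (round-robin), so no seed is needed here.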
iris_nbayes = H2ONaiveBayesEstimator(nfolds=3,fold_assignment="Modulo")
iris_nbayes.train(x=list(range(4)), y=4, training_frame=iris)
iris_nbayes.show()
print("And here it is:")
print(iris_nbayes.cross_validation_metrics_summary())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame())
print(iris_nbayes.cross_validation_metrics_summary().as_data_frame()['mean'])
if __name__ == "__main__":
pyunit_utils.standalone_test(nb_iris)
else:
nb_iris()
|
|
2f69ff34e9d0aa9ea4119e01222a1b0fc6161b39
|
ubuntu-cloud.py
|
ubuntu-cloud.py
|
#!/usr/bin/env python
from __future__ import print_function
import os
import requests
# TODO(bc): provide these via config and/or argparse
MIRROR_PATH = '/mnt/mirror.os02/ubuntu-cloud'
RELEASE_LIST = ['trusty',
'utopic']
ARCH_LIST = ['amd64']
LABEL_LIST = ['release']
ITEM_LIST = ['disk1.img']
UPSTREAM_BASE = 'http://cloud-images.ubuntu.com'
UPSTREAM_FEED = 'releases/streams/v1/com.ubuntu.cloud:released:download.json'
def download_file(url, local_path, sum=None):
parent_dir = os.path.dirname(local_path)
try:
os.makedirs(parent_dir)
except Exception:
pass
resp = requests.get(url, stream=True)
with open(local_path, 'w') as ff:
for chunk in resp.iter_content(chunk_size=4096):
if not chunk:
continue
ff.write(chunk)
ff.flush()
return True
def mirror():
download_items = []
resp = requests.get('%s/%s' % (UPSTREAM_BASE, UPSTREAM_FEED))
for product_key, product in resp.json().get('products').iteritems():
if product['release'] not in RELEASE_LIST:
continue
if product['arch'] not in ARCH_LIST:
continue
for version_key, version in product['versions'].iteritems():
if version['label'] not in LABEL_LIST:
continue
for item_type, item in version['items'].iteritems():
if item_type not in ITEM_LIST:
continue
download_items.append(item)
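    # Sort the items by path in descending order before downloading.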
download_items = sorted(download_items,
lambda x, y: cmp(y['path'], x['path']))
for ii in download_items:
local_path = '%s/%s' % (MIRROR_PATH, ii['path'])
url = '%s/%s' % (UPSTREAM_BASE, ii['path'])
print(local_path)
if os.path.exists(local_path):
continue
download_file(url, local_path)
if __name__ == '__main__':
mirror()
|
Add POC script to mirror Ubuntu cloud images
|
Add POC script to mirror Ubuntu cloud images
|
Python
|
mit
|
briancline/mirror-scripts,briancline/mirror-scripts
|
Add POC script to mirror Ubuntu cloud images
|
#!/usr/bin/env python
from __future__ import print_function
import os
import requests
# TODO(bc): provide these via config and/or argparse
MIRROR_PATH = '/mnt/mirror.os02/ubuntu-cloud'
RELEASE_LIST = ['trusty',
'utopic']
ARCH_LIST = ['amd64']
LABEL_LIST = ['release']
ITEM_LIST = ['disk1.img']
UPSTREAM_BASE = 'http://cloud-images.ubuntu.com'
UPSTREAM_FEED = 'releases/streams/v1/com.ubuntu.cloud:released:download.json'
def download_file(url, local_path, sum=None):
parent_dir = os.path.dirname(local_path)
try:
os.makedirs(parent_dir)
except Exception:
pass
resp = requests.get(url, stream=True)
with open(local_path, 'w') as ff:
for chunk in resp.iter_content(chunk_size=4096):
if not chunk:
continue
ff.write(chunk)
ff.flush()
return True
def mirror():
download_items = []
resp = requests.get('%s/%s' % (UPSTREAM_BASE, UPSTREAM_FEED))
for product_key, product in resp.json().get('products').iteritems():
if product['release'] not in RELEASE_LIST:
continue
if product['arch'] not in ARCH_LIST:
continue
for version_key, version in product['versions'].iteritems():
if version['label'] not in LABEL_LIST:
continue
for item_type, item in version['items'].iteritems():
if item_type not in ITEM_LIST:
continue
download_items.append(item)
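    # Sort the items by path in descending order before downloading.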
download_items = sorted(download_items,
lambda x, y: cmp(y['path'], x['path']))
for ii in download_items:
local_path = '%s/%s' % (MIRROR_PATH, ii['path'])
url = '%s/%s' % (UPSTREAM_BASE, ii['path'])
print(local_path)
if os.path.exists(local_path):
continue
download_file(url, local_path)
if __name__ == '__main__':
mirror()
|
<commit_before><commit_msg>Add POC script to mirror Ubuntu cloud images<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import os
import requests
# TODO(bc): provide these via config and/or argparse
MIRROR_PATH = '/mnt/mirror.os02/ubuntu-cloud'
RELEASE_LIST = ['trusty',
'utopic']
ARCH_LIST = ['amd64']
LABEL_LIST = ['release']
ITEM_LIST = ['disk1.img']
UPSTREAM_BASE = 'http://cloud-images.ubuntu.com'
UPSTREAM_FEED = 'releases/streams/v1/com.ubuntu.cloud:released:download.json'
def download_file(url, local_path, sum=None):
parent_dir = os.path.dirname(local_path)
try:
os.makedirs(parent_dir)
except Exception:
pass
resp = requests.get(url, stream=True)
with open(local_path, 'w') as ff:
for chunk in resp.iter_content(chunk_size=4096):
if not chunk:
continue
ff.write(chunk)
ff.flush()
return True
def mirror():
download_items = []
resp = requests.get('%s/%s' % (UPSTREAM_BASE, UPSTREAM_FEED))
for product_key, product in resp.json().get('products').iteritems():
if product['release'] not in RELEASE_LIST:
continue
if product['arch'] not in ARCH_LIST:
continue
for version_key, version in product['versions'].iteritems():
if version['label'] not in LABEL_LIST:
continue
for item_type, item in version['items'].iteritems():
if item_type not in ITEM_LIST:
continue
download_items.append(item)
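    # Sort the items by path in descending order before downloading.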
download_items = sorted(download_items,
lambda x, y: cmp(y['path'], x['path']))
for ii in download_items:
local_path = '%s/%s' % (MIRROR_PATH, ii['path'])
url = '%s/%s' % (UPSTREAM_BASE, ii['path'])
print(local_path)
if os.path.exists(local_path):
continue
download_file(url, local_path)
if __name__ == '__main__':
mirror()
|
Add POC script to mirror Ubuntu cloud images#!/usr/bin/env python
from __future__ import print_function
import os
import requests
# TODO(bc): provide these via config and/or argparse
MIRROR_PATH = '/mnt/mirror.os02/ubuntu-cloud'
RELEASE_LIST = ['trusty',
'utopic']
ARCH_LIST = ['amd64']
LABEL_LIST = ['release']
ITEM_LIST = ['disk1.img']
UPSTREAM_BASE = 'http://cloud-images.ubuntu.com'
UPSTREAM_FEED = 'releases/streams/v1/com.ubuntu.cloud:released:download.json'
def download_file(url, local_path, sum=None):
parent_dir = os.path.dirname(local_path)
try:
os.makedirs(parent_dir)
except Exception:
pass
resp = requests.get(url, stream=True)
with open(local_path, 'w') as ff:
for chunk in resp.iter_content(chunk_size=4096):
if not chunk:
continue
ff.write(chunk)
ff.flush()
return True
def mirror():
download_items = []
resp = requests.get('%s/%s' % (UPSTREAM_BASE, UPSTREAM_FEED))
for product_key, product in resp.json().get('products').iteritems():
if product['release'] not in RELEASE_LIST:
continue
if product['arch'] not in ARCH_LIST:
continue
for version_key, version in product['versions'].iteritems():
if version['label'] not in LABEL_LIST:
continue
for item_type, item in version['items'].iteritems():
if item_type not in ITEM_LIST:
continue
download_items.append(item)
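    # Sort the items by path in descending order before downloading.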
download_items = sorted(download_items,
lambda x, y: cmp(y['path'], x['path']))
for ii in download_items:
local_path = '%s/%s' % (MIRROR_PATH, ii['path'])
url = '%s/%s' % (UPSTREAM_BASE, ii['path'])
print(local_path)
if os.path.exists(local_path):
continue
download_file(url, local_path)
if __name__ == '__main__':
mirror()
|
<commit_before><commit_msg>Add POC script to mirror Ubuntu cloud images<commit_after>#!/usr/bin/env python
from __future__ import print_function
import os
import requests
# TODO(bc): provide these via config and/or argparse
MIRROR_PATH = '/mnt/mirror.os02/ubuntu-cloud'
RELEASE_LIST = ['trusty',
'utopic']
ARCH_LIST = ['amd64']
LABEL_LIST = ['release']
ITEM_LIST = ['disk1.img']
UPSTREAM_BASE = 'http://cloud-images.ubuntu.com'
UPSTREAM_FEED = 'releases/streams/v1/com.ubuntu.cloud:released:download.json'
def download_file(url, local_path, sum=None):
parent_dir = os.path.dirname(local_path)
try:
os.makedirs(parent_dir)
except Exception:
pass
resp = requests.get(url, stream=True)
with open(local_path, 'w') as ff:
for chunk in resp.iter_content(chunk_size=4096):
if not chunk:
continue
ff.write(chunk)
ff.flush()
return True
def mirror():
download_items = []
resp = requests.get('%s/%s' % (UPSTREAM_BASE, UPSTREAM_FEED))
for product_key, product in resp.json().get('products').iteritems():
if product['release'] not in RELEASE_LIST:
continue
if product['arch'] not in ARCH_LIST:
continue
for version_key, version in product['versions'].iteritems():
if version['label'] not in LABEL_LIST:
continue
for item_type, item in version['items'].iteritems():
if item_type not in ITEM_LIST:
continue
download_items.append(item)
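    # Sort the items by path in descending order before downloading.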
download_items = sorted(download_items,
lambda x, y: cmp(y['path'], x['path']))
for ii in download_items:
local_path = '%s/%s' % (MIRROR_PATH, ii['path'])
url = '%s/%s' % (UPSTREAM_BASE, ii['path'])
print(local_path)
if os.path.exists(local_path):
continue
download_file(url, local_path)
if __name__ == '__main__':
mirror()
|
|
6afa2a817387d24751b62845113e6f4de01025ba
|
code/tests/test_smoothing.py
|
code/tests/test_smoothing.py
|
"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
Add separate test function for smoothing
|
Add separate test function for smoothing
|
Python
|
bsd-3-clause
|
berkeley-stat159/project-delta
|
Add separate test function for smoothing
|
"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
<commit_before><commit_msg>Add separate test function for smoothing<commit_after>
|
"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
Add separate test function for smoothing"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
<commit_before><commit_msg>Add separate test function for smoothing<commit_after>"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
|
981427d1efb4f3887fa1110e9a3ba8b6dd8910e0
|
json_parser/profiling.py
|
json_parser/profiling.py
|
from timeit import timeit
TEST1 = """
load_string(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST2 = """
load_string_faster(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST3 = """
json.loads(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
def time():
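    # Time the two hand-written parsers against the stdlib json module on the same input.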
n = 50
print "Timing on input:", r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}'
print 'load_string: ', timeit(TEST1, "from json_parser import load_string", number=n)
print 'load_string_faster:', timeit(TEST2, "from json_parser import load_string_faster", number=n)
print 'json.loads: ', timeit(TEST3, "import json", number=n)
if __name__ == '__main__':
time()
|
Add timing comparisons to JSON parser.
|
Add timing comparisons to JSON parser.
|
Python
|
mit
|
py-in-the-sky/challenges,py-in-the-sky/challenges,py-in-the-sky/challenges
|
Add timing comparisons to JSON parser.
|
from timeit import timeit
TEST1 = """
load_string(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST2 = """
load_string_faster(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST3 = """
json.loads(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
def time():
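    # Time the two hand-written parsers against the stdlib json module on the same input.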
n = 50
print "Timing on input:", r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}'
print 'load_string: ', timeit(TEST1, "from json_parser import load_string", number=n)
print 'load_string_faster:', timeit(TEST2, "from json_parser import load_string_faster", number=n)
print 'json.loads: ', timeit(TEST3, "import json", number=n)
if __name__ == '__main__':
time()
|
<commit_before><commit_msg>Add timing comparisons to JSON parser.<commit_after>
|
from timeit import timeit
TEST1 = """
load_string(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST2 = """
load_string_faster(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST3 = """
json.loads(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
def time():
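    # Time the two hand-written parsers against the stdlib json module on the same input.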
n = 50
print "Timing on input:", r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}'
print 'load_string: ', timeit(TEST1, "from json_parser import load_string", number=n)
print 'load_string_faster:', timeit(TEST2, "from json_parser import load_string_faster", number=n)
print 'json.loads: ', timeit(TEST3, "import json", number=n)
if __name__ == '__main__':
time()
|
Add timing comparisons to JSON parser.from timeit import timeit
TEST1 = """
load_string(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST2 = """
load_string_faster(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST3 = """
json.loads(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
def time():
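    # Time the two hand-written parsers against the stdlib json module on the same input.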
n = 50
print "Timing on input:", r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}'
print 'load_string: ', timeit(TEST1, "from json_parser import load_string", number=n)
print 'load_string_faster:', timeit(TEST2, "from json_parser import load_string_faster", number=n)
print 'json.loads: ', timeit(TEST3, "import json", number=n)
if __name__ == '__main__':
time()
|
<commit_before><commit_msg>Add timing comparisons to JSON parser.<commit_after>from timeit import timeit
TEST1 = """
load_string(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST2 = """
load_string_faster(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
TEST3 = """
json.loads(r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}')
"""
def time():
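    # Time the two hand-written parsers against the stdlib json module on the same input.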
n = 50
print "Timing on input:", r'{ "one": { "two": [{ "three": { "four": null }}, false ], "five": 5 }}'
print 'load_string: ', timeit(TEST1, "from json_parser import load_string", number=n)
print 'load_string_faster:', timeit(TEST2, "from json_parser import load_string_faster", number=n)
print 'json.loads: ', timeit(TEST3, "import json", number=n)
if __name__ == '__main__':
time()
|
|
e7e9c0c624e53f34b6b77f8de99cbcc5cda4f3fa
|
triangular_matrix.py
|
triangular_matrix.py
|
import pandas as pd
import numpy as np
import itertools
import argparse
parser = argparse.ArgumentParser(description='Convert a triangular matrix of co-segregation frequencies into a full square matrix.')
parser.add_argument('-t', '--triangular-matrix', required=True, help='A whitespace-delimited text file containing a triangular matrix of co-segregation frequencies')
parser.add_argument('-c', '--chrom', required=True, help='The chromosome to select windows for')
parser.add_argument('-w', '--windows-file', required=True, help='A whitespace-delimited file of genomic windows, with the chromosome in the first column')
args = parser.parse_args()
output_file = args.triangular_matrix.split('.')
output_file[-1] = 'full_matrix.npz'
output_file = '.'.join(output_file)
def open_triangular_matrix(filepath):
with open(filepath) as in_data:
arr = [[float(i) for i in line.split()] for line in in_data]
N = len(arr[-1])
full_array = np.zeros((N,N))
lower_i = np.tril_indices_from(full_array)
upper_i = np.triu_indices_from(full_array)
full_array[:] = np.NAN
full_array[lower_i] = list(itertools.chain(*arr))
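    # Mirror the lower triangle into the upper triangle to make the matrix symmetric.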
full_array[upper_i] = full_array.T[upper_i]
full_array[full_array > 1.] = np.NAN
return full_array
def open_windows(filepath, chrom):
data = pd.read_csv(filepath, delim_whitespace=True, header=None)
return np.array(data[data[0] == chrom])
scores = open_triangular_matrix(args.triangular_matrix)
windows = open_windows(args.windows_file, args.chrom)
assert len(scores) == len(windows)
np.savez_compressed(output_file, scores=scores, windows=windows)
|
Add script to turn triangular matrices into squares
|
Add script to turn triangular matrices into squares
|
Python
|
apache-2.0
|
pombo-lab/gamtools,pombo-lab/gamtools
|
Add script to turn triangular matrices into squares
|
import pandas as pd
import numpy as np
import itertools
import argparse
parser = argparse.ArgumentParser(description='Convert a triangular matrix of co-segregation frequencies into a full square matrix.')
parser.add_argument('-t', '--triangular-matrix', required=True, help='A whitespace-delimited text file containing a triangular matrix of co-segregation frequencies')
parser.add_argument('-c', '--chrom', required=True, help='The chromosome to select windows for')
parser.add_argument('-w', '--windows-file', required=True, help='A whitespace-delimited file of genomic windows, with the chromosome in the first column')
args = parser.parse_args()
output_file = args.triangular_matrix.split('.')
output_file[-1] = 'full_matrix.npz'
output_file = '.'.join(output_file)
def open_triangular_matrix(filepath):
with open(filepath) as in_data:
arr = [[float(i) for i in line.split()] for line in in_data]
N = len(arr[-1])
full_array = np.zeros((N,N))
lower_i = np.tril_indices_from(full_array)
upper_i = np.triu_indices_from(full_array)
full_array[:] = np.NAN
full_array[lower_i] = list(itertools.chain(*arr))
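    # Mirror the lower triangle into the upper triangle to make the matrix symmetric.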
full_array[upper_i] = full_array.T[upper_i]
full_array[full_array > 1.] = np.NAN
return full_array
def open_windows(filepath, chrom):
data = pd.read_csv(filepath, delim_whitespace=True, header=None)
return np.array(data[data[0] == chrom])
scores = open_triangular_matrix(args.triangular_matrix)
windows = open_windows(args.windows_file, args.chrom)
assert len(scores) == len(windows)
np.savez_compressed(output_file, scores=scores, windows=windows)
|
<commit_before><commit_msg>Add script to turn triangular matrices into squares<commit_after>
|
import pandas as pd
import numpy as np
import itertools
import argparse
parser = argparse.ArgumentParser(description='Convert a triangular matrix of co-segregation frequencies into a full square matrix.')
parser.add_argument('-t', '--triangular-matrix', required=True, help='A whitespace-delimited text file containing a triangular matrix of co-segregation frequencies')
parser.add_argument('-c', '--chrom', required=True, help='The chromosome to select windows for')
parser.add_argument('-w', '--windows-file', required=True, help='A whitespace-delimited file of genomic windows, with the chromosome in the first column')
args = parser.parse_args()
output_file = args.triangular_matrix.split('.')
output_file[-1] = 'full_matrix.npz'
output_file = '.'.join(output_file)
def open_triangular_matrix(filepath):
with open(filepath) as in_data:
arr = [[float(i) for i in line.split()] for line in in_data]
N = len(arr[-1])
full_array = np.zeros((N,N))
lower_i = np.tril_indices_from(full_array)
upper_i = np.triu_indices_from(full_array)
full_array[:] = np.NAN
full_array[lower_i] = list(itertools.chain(*arr))
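    # Mirror the lower triangle into the upper triangle to make the matrix symmetric.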
full_array[upper_i] = full_array.T[upper_i]
full_array[full_array > 1.] = np.NAN
return full_array
def open_windows(filepath, chrom):
data = pd.read_csv(filepath, delim_whitespace=True, header=None)
return np.array(data[data[0] == chrom])
scores = open_triangular_matrix(args.triangular_matrix)
windows = open_windows(args.windows_file, args.chrom)
assert len(scores) == len(windows)
np.savez_compressed(output_file, scores=scores, windows=windows)
|
Add script to turn triangular matrices into squaresimport pandas as pd
import numpy as np
import itertools
import argparse
parser = argparse.ArgumentParser(description='Convert a triangular matrix of co-segregation frequencies into a full square matrix.')
parser.add_argument('-t', '--triangular-matrix', required=True, help='A whitespace-delimited text file containing a triangular matrix of co-segregation frequencies')
parser.add_argument('-c', '--chrom', required=True, help='The chromosome to select windows for')
parser.add_argument('-w', '--windows-file', required=True, help='A whitespace-delimited file of genomic windows, with the chromosome in the first column')
args = parser.parse_args()
output_file = args.triangular_matrix.split('.')
output_file[-1] = 'full_matrix.npz'
output_file = '.'.join(output_file)
def open_triangular_matrix(filepath):
with open(filepath) as in_data:
arr = [[float(i) for i in line.split()] for line in in_data]
N = len(arr[-1])
full_array = np.zeros((N,N))
lower_i = np.tril_indices_from(full_array)
upper_i = np.triu_indices_from(full_array)
full_array[:] = np.NAN
full_array[lower_i] = list(itertools.chain(*arr))
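    # Mirror the lower triangle into the upper triangle to make the matrix symmetric.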
full_array[upper_i] = full_array.T[upper_i]
full_array[full_array > 1.] = np.NAN
return full_array
def open_windows(filepath, chrom):
data = pd.read_csv(filepath, delim_whitespace=True, header=None)
return np.array(data[data[0] == chrom])
scores = open_triangular_matrix(args.triangular_matrix)
windows = open_windows(args.windows_file, args.chrom)
assert len(scores) == len(windows)
np.savez_compressed(output_file, scores=scores, windows=windows)
|
<commit_before><commit_msg>Add script to turn triangular matrices into squares<commit_after>import pandas as pd
import numpy as np
import itertools
import argparse
parser = argparse.ArgumentParser(description='Convert a triangular co-segregation matrix into a full square matrix, saved alongside its genomic windows.')
parser.add_argument('-t', '--triangular-matrix', required=True, help='A whitespace-delimited text file containing the lower triangle of the matrix')
parser.add_argument('-c', '--chrom', required=True, help='Chromosome whose windows should be extracted from the windows file')
parser.add_argument('-w', '--windows-file', required=True, help='A whitespace-delimited windows file whose first column is the chromosome')
args = parser.parse_args()
output_file = args.triangular_matrix.split('.')
output_file[-1] = 'full_matrix.npz'
output_file = '.'.join(output_file)
def open_triangular_matrix(filepath):
with open(filepath) as in_data:
arr = [[float(i) for i in line.split()] for line in in_data]
N = len(arr[-1])
full_array = np.zeros((N,N))
lower_i = np.tril_indices_from(full_array)
upper_i = np.triu_indices_from(full_array)
full_array[:] = np.NAN
full_array[lower_i] = list(itertools.chain(*arr))
full_array[upper_i] = full_array.T[upper_i]
full_array[full_array > 1.] = np.NAN
return full_array
def open_windows(filepath, chrom):
data = pd.read_csv(filepath, delim_whitespace=True, header=None)
return np.array(data[data[0] == chrom])
scores = open_triangular_matrix(args.triangular_matrix)
windows = open_windows(args.windows_file, args.chrom)
assert len(scores) == len(windows)
np.savez_compressed(output_file, scores=scores, windows=windows)
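
A hedged usage sketch for reading the script's output back in (the filename below is illustrative; the script derives it from the input path by swapping the final extension for 'full_matrix.npz'):

import numpy as np

npz = np.load('scores.full_matrix.npz')
scores, windows = npz['scores'], npz['windows']
# One window per matrix row/column, matching the script's own assert.
assert scores.shape[0] == scores.shape[1] == len(windows)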
|
|
a581253c6daee875855ac1677717eb1cf464e704
|
froide/publicbody/migrations/0021_proposedpublicbody.py
|
froide/publicbody/migrations/0021_proposedpublicbody.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-19 10:35
from __future__ import unicode_literals
from django.db import migrations
import froide.publicbody.models
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0020_foilaw_requires_signature'),
]
operations = [
migrations.CreateModel(
name='ProposedPublicBody',
fields=[
],
options={
'verbose_name': 'Proposed Public Body',
'verbose_name_plural': 'Proposed Public Bodies',
'proxy': True,
'indexes': [],
},
bases=('publicbody.publicbody',),
managers=[
('objects', froide.publicbody.models.ProposedPublicBodyManager()),
],
),
]
|
Add proposed publicbody proxy model migration
|
Add proposed publicbody proxy model migration
|
Python
|
mit
|
fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide
|
Add proposed publicbody proxy model migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-19 10:35
from __future__ import unicode_literals
from django.db import migrations
import froide.publicbody.models
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0020_foilaw_requires_signature'),
]
operations = [
migrations.CreateModel(
name='ProposedPublicBody',
fields=[
],
options={
'verbose_name': 'Proposed Public Body',
'verbose_name_plural': 'Proposed Public Bodies',
'proxy': True,
'indexes': [],
},
bases=('publicbody.publicbody',),
managers=[
('objects', froide.publicbody.models.ProposedPublicBodyManager()),
],
),
]
|
<commit_before><commit_msg>Add proposed publicbody proxy model migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-19 10:35
from __future__ import unicode_literals
from django.db import migrations
import froide.publicbody.models
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0020_foilaw_requires_signature'),
]
operations = [
migrations.CreateModel(
name='ProposedPublicBody',
fields=[
],
options={
'verbose_name': 'Proposed Public Body',
'verbose_name_plural': 'Proposed Public Bodies',
'proxy': True,
'indexes': [],
},
bases=('publicbody.publicbody',),
managers=[
('objects', froide.publicbody.models.ProposedPublicBodyManager()),
],
),
]
|
Add proposed publicbody proxy model migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-19 10:35
from __future__ import unicode_literals
from django.db import migrations
import froide.publicbody.models
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0020_foilaw_requires_signature'),
]
operations = [
migrations.CreateModel(
name='ProposedPublicBody',
fields=[
],
options={
'verbose_name': 'Proposed Public Body',
'verbose_name_plural': 'Proposed Public Bodies',
'proxy': True,
'indexes': [],
},
bases=('publicbody.publicbody',),
managers=[
('objects', froide.publicbody.models.ProposedPublicBodyManager()),
],
),
]
|
<commit_before><commit_msg>Add proposed publicbody proxy model migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-19 10:35
from __future__ import unicode_literals
from django.db import migrations
import froide.publicbody.models
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0020_foilaw_requires_signature'),
]
operations = [
migrations.CreateModel(
name='ProposedPublicBody',
fields=[
],
options={
'verbose_name': 'Proposed Public Body',
'verbose_name_plural': 'Proposed Public Bodies',
'proxy': True,
'indexes': [],
},
bases=('publicbody.publicbody',),
managers=[
('objects', froide.publicbody.models.ProposedPublicBodyManager()),
],
),
]
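
For context, a proxy-model migration of this shape typically mirrors a declaration like the following minimal sketch (names taken from the migration itself; PublicBody and ProposedPublicBodyManager live in froide.publicbody.models, and this is not necessarily froide's actual source):

class ProposedPublicBody(PublicBody):
    objects = ProposedPublicBodyManager()

    class Meta:
        proxy = True
        verbose_name = 'Proposed Public Body'
        verbose_name_plural = 'Proposed Public Bodies'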
|
|
dc4939b981f508c966c02580ab45091f05265140
|
contrib/nic_file_fixcase.py
|
contrib/nic_file_fixcase.py
|
#!/bin/python
# coding: utf-8
import sys
import lglass.nic
import lglass.object
old_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=False)
new_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=True)
for obj in old_db.find():
old_db.delete(obj)
new_db.save(obj)
|
Add script to fix case in file databases
|
Add script to fix case in file databases
|
Python
|
mit
|
fritz0705/lglass
|
Add script to fix case in file databases
|
#!/bin/python
# coding: utf-8
import sys
import lglass.nic
import lglass.object
old_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=False)
new_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=True)
for obj in old_db.find():
old_db.delete(obj)
new_db.save(obj)
|
<commit_before><commit_msg>Add script to fix case in file databases<commit_after>
|
#!/bin/python
# coding: utf-8
import sys
import lglass.nic
import lglass.object
old_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=False)
new_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=True)
for obj in old_db.find():
old_db.delete(obj)
new_db.save(obj)
|
Add script to fix case in file databases#!/bin/python
# coding: utf-8
import sys
import lglass.nic
import lglass.object
old_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=False)
new_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=True)
for obj in old_db.find():
old_db.delete(obj)
new_db.save(obj)
|
<commit_before><commit_msg>Add script to fix case in file databases<commit_after>#!/bin/python
# coding: utf-8
import sys
import lglass.nic
import lglass.object
old_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=False)
new_db = lglass.nic.FileDatabase(sys.argv[1], case_insensitive=True)
for obj in old_db.find():
old_db.delete(obj)
new_db.save(obj)
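
Invocation is presumably just the database path as the single positional argument, since it is read from sys.argv[1]: python nic_file_fixcase.py /path/to/file-database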
|
|
debf6402b30606aae07828e5f4cd92d3fcb3834d
|
fetch_configs/devtools-internal.py
|
fetch_configs/devtools-internal.py
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class DevToolsInternal(config_util.Config):
"""Basic Config class for the whole set of DevTools."""
@staticmethod
def fetch_spec(props):
url = 'https://chrome-internal.googlesource.com/devtools/devtools-internal.git'
solution = {
'name' : 'devtools-internal',
'url' : url,
'deps_file' : 'DEPS',
'managed' : False,
'custom_deps' : {},
}
spec = {
'solutions': [solution],
'with_branch_heads': True,
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'devtools-internal'
def main(argv=None):
return DevToolsInternal().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add a fetch config for devtools (internal).
|
Add a fetch config for devtools (internal).
This change has precedent, c.f. 04d16a561 for infra_internal.
R=yangguo
Bug: None
Change-Id: I089b114bcfe110ad138284bb71802cfc5d7f157d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3782488
Reviewed-by: Yang Guo <1cf0af8245a3a17a632a2c73531392b0f9761c76@chromium.org>
Reviewed-by: Aravind Vasudevan <dbe94b625b74f03bb5e24a737a4f1e15753433de@google.com>
Commit-Queue: Thiago Perrotta <660c40383373240e945f6beab69fd6841cb4aa33@chromium.org>
|
Python
|
bsd-3-clause
|
CoherentLabs/depot_tools,CoherentLabs/depot_tools
|
Add a fetch config for devtools (internal).
This change has precedent, c.f. 04d16a561 for infra_internal.
R=yangguo
Bug: None
Change-Id: I089b114bcfe110ad138284bb71802cfc5d7f157d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3782488
Reviewed-by: Yang Guo <1cf0af8245a3a17a632a2c73531392b0f9761c76@chromium.org>
Reviewed-by: Aravind Vasudevan <dbe94b625b74f03bb5e24a737a4f1e15753433de@google.com>
Commit-Queue: Thiago Perrotta <660c40383373240e945f6beab69fd6841cb4aa33@chromium.org>
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class DevToolsInternal(config_util.Config):
"""Basic Config class for the whole set of DevTools."""
@staticmethod
def fetch_spec(props):
url = 'https://chrome-internal.googlesource.com/devtools/devtools-internal.git'
solution = {
'name' : 'devtools-internal',
'url' : url,
'deps_file' : 'DEPS',
'managed' : False,
'custom_deps' : {},
}
spec = {
'solutions': [solution],
'with_branch_heads': True,
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'devtools-internal'
def main(argv=None):
return DevToolsInternal().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add a fetch config for devtools (internal).
This change has precedent, c.f. 04d16a561 for infra_internal.
R=yangguo
Bug: None
Change-Id: I089b114bcfe110ad138284bb71802cfc5d7f157d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3782488
Reviewed-by: Yang Guo <1cf0af8245a3a17a632a2c73531392b0f9761c76@chromium.org>
Reviewed-by: Aravind Vasudevan <dbe94b625b74f03bb5e24a737a4f1e15753433de@google.com>
Commit-Queue: Thiago Perrotta <660c40383373240e945f6beab69fd6841cb4aa33@chromium.org><commit_after>
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class DevToolsInternal(config_util.Config):
"""Basic Config class for the whole set of DevTools."""
@staticmethod
def fetch_spec(props):
url = 'https://chrome-internal.googlesource.com/devtools/devtools-internal.git'
solution = {
'name' : 'devtools-internal',
'url' : url,
'deps_file' : 'DEPS',
'managed' : False,
'custom_deps' : {},
}
spec = {
'solutions': [solution],
'with_branch_heads': True,
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'devtools-internal'
def main(argv=None):
return DevToolsInternal().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add a fetch config for devtools (internal).
This change has precedent, c.f. 04d16a561 for infra_internal.
R=yangguo
Bug: None
Change-Id: I089b114bcfe110ad138284bb71802cfc5d7f157d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3782488
Reviewed-by: Yang Guo <1cf0af8245a3a17a632a2c73531392b0f9761c76@chromium.org>
Reviewed-by: Aravind Vasudevan <dbe94b625b74f03bb5e24a737a4f1e15753433de@google.com>
Commit-Queue: Thiago Perrotta <660c40383373240e945f6beab69fd6841cb4aa33@chromium.org># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class DevToolsInternal(config_util.Config):
"""Basic Config class for the whole set of DevTools."""
@staticmethod
def fetch_spec(props):
url = 'https://chrome-internal.googlesource.com/devtools/devtools-internal.git'
solution = {
'name' : 'devtools-internal',
'url' : url,
'deps_file' : 'DEPS',
'managed' : False,
'custom_deps' : {},
}
spec = {
'solutions': [solution],
'with_branch_heads': True,
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'devtools-internal'
def main(argv=None):
return DevToolsInternal().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add a fetch config for devtools (internal).
This change has precedent, c.f. 04d16a561 for infra_internal.
R=yangguo
Bug: None
Change-Id: I089b114bcfe110ad138284bb71802cfc5d7f157d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3782488
Reviewed-by: Yang Guo <1cf0af8245a3a17a632a2c73531392b0f9761c76@chromium.org>
Reviewed-by: Aravind Vasudevan <dbe94b625b74f03bb5e24a737a4f1e15753433de@google.com>
Commit-Queue: Thiago Perrotta <660c40383373240e945f6beab69fd6841cb4aa33@chromium.org><commit_after># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class DevToolsInternal(config_util.Config):
"""Basic Config class for the whole set of DevTools."""
@staticmethod
def fetch_spec(props):
url = 'https://chrome-internal.googlesource.com/devtools/devtools-internal.git'
solution = {
'name' : 'devtools-internal',
'url' : url,
'deps_file' : 'DEPS',
'managed' : False,
'custom_deps' : {},
}
spec = {
'solutions': [solution],
'with_branch_heads': True,
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'devtools-internal'
def main(argv=None):
return DevToolsInternal().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
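
Once this config lands, checkout presumably follows the standard depot_tools flow: run 'fetch devtools-internal' from an empty directory, assuming depot_tools is on PATH and the user has chrome-internal access.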
|
|
b4f1f1398772d26c8eb20a80c4df76b601e406dd
|
st2api/tests/unit/controllers/v1/test_service_registry.py
|
st2api/tests/unit/controllers/v1/test_service_registry.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.service_setup import teardown as common_teardown
from st2common.service_setup import register_service_in_service_registry
from st2common.util import system_info
from st2common.services.coordination import get_member_id
from st2tests import config as tests_config
from tests.base import FunctionalTest
__all__ = [
'ServiceRegistryControllerTestCase'
]
class ServiceRegistryControllerTestCase(FunctionalTest):
    @classmethod
    def setUpClass(cls):
        super(ServiceRegistryControllerTestCase, cls).setUpClass()
        tests_config.parse_args()
        # NOTE: We call this directly (instead of relying on common_setup) to emulate the
        # service being registered in the service registry during the bootstrap phase
        register_service_in_service_registry(service='mock_service',
                                             capabilities={'key1': 'value1',
                                                           'name': 'mock_service'},
                                             start_heart=True)
    @classmethod
    def tearDownClass(cls):
        super(ServiceRegistryControllerTestCase, cls).tearDownClass()
common_teardown()
def test_get_groups(self):
list_resp = self.app.get('/v1/service_registry/groups')
self.assertEqual(list_resp.status_int, 200)
self.assertEqual(list_resp.json, {'groups': ['mock_service']})
def test_get_group_members(self):
proc_info = system_info.get_process_info()
member_id = get_member_id()
# 1. Group doesn't exist
resp = self.app.get('/v1/service_registry/groups/doesnt-exist/members', expect_errors=True)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.json['faultstring'], 'Group with ID "doesnt-exist" not found.')
# 2. Group exists and has a single member
resp = self.app.get('/v1/service_registry/groups/mock_service/members')
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.json, {
'members': [
{
'member_id': member_id,
'capabilities': {
'key1': 'value1',
'name': 'mock_service',
'hostname': proc_info['hostname'],
'pid': proc_info['pid']
}
}
]
})
|
Add test cases for new service registry related API endpoint.
|
Add test cases for new service registry related API endpoint.
|
Python
|
apache-2.0
|
nzlosh/st2,Plexxi/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,nzlosh/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,Plexxi/st2,StackStorm/st2,Plexxi/st2
|
Add test cases for new service registry related API endpoint.
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.service_setup import teardown as common_teardown
from st2common.service_setup import register_service_in_service_registry
from st2common.util import system_info
from st2common.services.coordination import get_member_id
from st2tests import config as tests_config
from tests.base import FunctionalTest
__all__ = [
'ServiceRegistryControllerTestCase'
]
class ServiceRegistryControllerTestCase(FunctionalTest):
    @classmethod
    def setUpClass(cls):
        super(ServiceRegistryControllerTestCase, cls).setUpClass()
        tests_config.parse_args()
        # NOTE: We call this directly (instead of relying on common_setup) to emulate the
        # service being registered in the service registry during the bootstrap phase
        register_service_in_service_registry(service='mock_service',
                                             capabilities={'key1': 'value1',
                                                           'name': 'mock_service'},
                                             start_heart=True)
    @classmethod
    def tearDownClass(cls):
        super(ServiceRegistryControllerTestCase, cls).tearDownClass()
common_teardown()
def test_get_groups(self):
list_resp = self.app.get('/v1/service_registry/groups')
self.assertEqual(list_resp.status_int, 200)
self.assertEqual(list_resp.json, {'groups': ['mock_service']})
def test_get_group_members(self):
proc_info = system_info.get_process_info()
member_id = get_member_id()
# 1. Group doesn't exist
resp = self.app.get('/v1/service_registry/groups/doesnt-exist/members', expect_errors=True)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.json['faultstring'], 'Group with ID "doesnt-exist" not found.')
# 2. Group exists and has a single member
resp = self.app.get('/v1/service_registry/groups/mock_service/members')
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.json, {
'members': [
{
'member_id': member_id,
'capabilities': {
'key1': 'value1',
'name': 'mock_service',
'hostname': proc_info['hostname'],
'pid': proc_info['pid']
}
}
]
})
|
<commit_before><commit_msg>Add test cases for new service registry related API endpoint.<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.service_setup import teardown as common_teardown
from st2common.service_setup import register_service_in_service_registry
from st2common.util import system_info
from st2common.services.coordination import get_member_id
from st2tests import config as tests_config
from tests.base import FunctionalTest
__all__ = [
'ServiceRegistryControllerTestCase'
]
class ServiceRegistryControllerTestCase(FunctionalTest):
    @classmethod
    def setUpClass(cls):
        super(ServiceRegistryControllerTestCase, cls).setUpClass()
        tests_config.parse_args()
        # NOTE: We call this directly (instead of relying on common_setup) to emulate the
        # service being registered in the service registry during the bootstrap phase
        register_service_in_service_registry(service='mock_service',
                                             capabilities={'key1': 'value1',
                                                           'name': 'mock_service'},
                                             start_heart=True)
    @classmethod
    def tearDownClass(cls):
        super(ServiceRegistryControllerTestCase, cls).tearDownClass()
common_teardown()
def test_get_groups(self):
list_resp = self.app.get('/v1/service_registry/groups')
self.assertEqual(list_resp.status_int, 200)
self.assertEqual(list_resp.json, {'groups': ['mock_service']})
def test_get_group_members(self):
proc_info = system_info.get_process_info()
member_id = get_member_id()
# 1. Group doesn't exist
resp = self.app.get('/v1/service_registry/groups/doesnt-exist/members', expect_errors=True)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.json['faultstring'], 'Group with ID "doesnt-exist" not found.')
# 2. Group exists and has a single member
resp = self.app.get('/v1/service_registry/groups/mock_service/members')
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.json, {
'members': [
{
'member_id': member_id,
'capabilities': {
'key1': 'value1',
'name': 'mock_service',
'hostname': proc_info['hostname'],
'pid': proc_info['pid']
}
}
]
})
|
Add test cases for new service registry related API endpoint.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.service_setup import teardown as common_teardown
from st2common.service_setup import register_service_in_service_registry
from st2common.util import system_info
from st2common.services.coordination import get_member_id
from st2tests import config as tests_config
from tests.base import FunctionalTest
__all__ = [
'ServiceRegistryControllerTestCase'
]
class ServiceRegistryControllerTestCase(FunctionalTest):
    @classmethod
    def setUpClass(cls):
        super(ServiceRegistryControllerTestCase, cls).setUpClass()
        tests_config.parse_args()
        # NOTE: We call this directly (instead of relying on common_setup) to emulate the
        # service being registered in the service registry during the bootstrap phase
        register_service_in_service_registry(service='mock_service',
                                             capabilities={'key1': 'value1',
                                                           'name': 'mock_service'},
                                             start_heart=True)
    @classmethod
    def tearDownClass(cls):
        super(ServiceRegistryControllerTestCase, cls).tearDownClass()
common_teardown()
def test_get_groups(self):
list_resp = self.app.get('/v1/service_registry/groups')
self.assertEqual(list_resp.status_int, 200)
self.assertEqual(list_resp.json, {'groups': ['mock_service']})
def test_get_group_members(self):
proc_info = system_info.get_process_info()
member_id = get_member_id()
# 1. Group doesn't exist
resp = self.app.get('/v1/service_registry/groups/doesnt-exist/members', expect_errors=True)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.json['faultstring'], 'Group with ID "doesnt-exist" not found.')
# 2. Group exists and has a single member
resp = self.app.get('/v1/service_registry/groups/mock_service/members')
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.json, {
'members': [
{
'member_id': member_id,
'capabilities': {
'key1': 'value1',
'name': 'mock_service',
'hostname': proc_info['hostname'],
'pid': proc_info['pid']
}
}
]
})
|
<commit_before><commit_msg>Add test cases for new service registry related API endpoint.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.service_setup import teardown as common_teardown
from st2common.service_setup import register_service_in_service_registry
from st2common.util import system_info
from st2common.services.coordination import get_member_id
from st2tests import config as tests_config
from tests.base import FunctionalTest
__all__ = [
'ServiceRegistryControllerTestCase'
]
class ServiceRegistryControllerTestCase(FunctionalTest):
    @classmethod
    def setUpClass(cls):
        super(ServiceRegistryControllerTestCase, cls).setUpClass()
        tests_config.parse_args()
        # NOTE: We call this directly (instead of relying on common_setup) to emulate the
        # service being registered in the service registry during the bootstrap phase
        register_service_in_service_registry(service='mock_service',
                                             capabilities={'key1': 'value1',
                                                           'name': 'mock_service'},
                                             start_heart=True)
    @classmethod
    def tearDownClass(cls):
        super(ServiceRegistryControllerTestCase, cls).tearDownClass()
common_teardown()
def test_get_groups(self):
list_resp = self.app.get('/v1/service_registry/groups')
self.assertEqual(list_resp.status_int, 200)
self.assertEqual(list_resp.json, {'groups': ['mock_service']})
def test_get_group_members(self):
proc_info = system_info.get_process_info()
member_id = get_member_id()
# 1. Group doesn't exist
resp = self.app.get('/v1/service_registry/groups/doesnt-exist/members', expect_errors=True)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.json['faultstring'], 'Group with ID "doesnt-exist" not found.')
# 2. Group exists and has a single member
resp = self.app.get('/v1/service_registry/groups/mock_service/members')
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.json, {
'members': [
{
'member_id': member_id,
'capabilities': {
'key1': 'value1',
'name': 'mock_service',
'hostname': proc_info['hostname'],
'pid': proc_info['pid']
}
}
]
})
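
Outside the test harness the same endpoints are plain HTTP GETs; a hedged sketch against a local st2api (host, port, and token are illustrative):

import requests

resp = requests.get('http://127.0.0.1:9101/v1/service_registry/groups',
                    headers={'X-Auth-Token': TOKEN})  # TOKEN is a placeholder
print(resp.json())  # -> {'groups': ['mock_service']}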
|
|
99c3b830efac69db667ba0b97b5b77c93bca0774
|
candidates/management/commands/candidates_remove_bogus_dates.py
|
candidates/management/commands/candidates_remove_bogus_dates.py
|
import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
def strip_bogus_fields(data, bogus_field_keys):
for key in bogus_field_keys:
if key in data:
print "Stripping out", key
del data[key]
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
msg = "Person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
strip_bogus_fields(
person,
[
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
strip_bogus_fields(
image,
[
'birth_date',
'death_date',
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
Add a command to remove some bogus data key / value pairs
|
Add a command to remove some bogus data key / value pairs
|
Python
|
agpl-3.0
|
mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,openstate/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,openstate/yournextrepresentative,openstate/yournextrepresentative
|
Add a command to remove some bogus data key / value pairs
|
import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
def strip_bogus_fields(data, bogus_field_keys):
for key in bogus_field_keys:
if key in data:
print "Stripping out", key
del data[key]
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
msg = "Person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
strip_bogus_fields(
person,
[
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
strip_bogus_fields(
image,
[
'birth_date',
'death_date',
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
<commit_before><commit_msg>Add a command to remove some bogus data key / value pairs<commit_after>
|
import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
def strip_bogus_fields(data, bogus_field_keys):
for key in bogus_field_keys:
if key in data:
print "Stripping out", key
del data[key]
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
msg = "Person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
strip_bogus_fields(
person,
[
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
strip_bogus_fields(
image,
[
'birth_date',
'death_date',
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
Add a command to remove some bogus data key / value pairsimport sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
def strip_bogus_fields(data, bogus_field_keys):
for key in bogus_field_keys:
if key in data:
print "Stripping out", key
del data[key]
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
msg = "Person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
strip_bogus_fields(
person,
[
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
strip_bogus_fields(
image,
[
'birth_date',
'death_date',
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
|
<commit_before><commit_msg>Add a command to remove some bogus data key / value pairs<commit_after>import sys
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from candidates.update import fix_dates
from django.core.management.base import BaseCommand
from slumber.exceptions import HttpClientError
def strip_bogus_fields(data, bogus_field_keys):
for key in bogus_field_keys:
if key in data:
print "Stripping out", key
del data[key]
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
embed='',
per_page=100
):
msg = "Person {0}persons/{1}"
print msg.format(self.get_base_url(), person['id'])
strip_bogus_fields(
person,
[
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
for image in person.get('images', []):
image.pop('_id', None)
# Some images have an empty 'created' field, which
# causes an Elasticsearch indexing error, so remove
# that if it's the case:
if not image.get('created'):
image.pop('created', None)
strip_bogus_fields(
image,
[
'birth_date',
'death_date',
'founding_date',
'dissolution_date',
'start_date',
'end_date'
]
)
fix_dates(person)
try:
self.api.persons(person['id']).put(person)
except HttpClientError as e:
print "HttpClientError", e.content
sys.exit(1)
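
strip_bogus_fields mutates the dict in place and silently skips missing keys; a small worked example with made-up data:

person = {'id': '123', 'founding_date': None, 'start_date': ''}
strip_bogus_fields(person, ['founding_date', 'dissolution_date', 'start_date', 'end_date'])
# person is now {'id': '123'}; each removed key is echoed via "Stripping out"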
|
|
782fc597ab79acb3cbd0d9dffb4ed77cc561d894
|
gimlet/tests/test_session.py
|
gimlet/tests/test_session.py
|
from unittest import TestCase
from webob import Request
from gimlet.factories import session_factory_factory
class TestSession(TestCase):
def _make_session(self, secret='secret', **options):
request = Request.blank('/')
return session_factory_factory(secret, **options)(request)
def test_session(self):
sess = self._make_session()
sess['a'] = 'a'
self.assertIn('a', sess)
self.assertIn('a', sess.channels['insecure'])
def test_session_secure_nonperm(self):
sess = self._make_session(secure=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_nonperm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
def test_session_secure_perm(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
def test_session_set_insecure(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess.set('a', 'a', secure=False)
self.assertIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
|
Add unit tests for Session
|
Add unit tests for Session
|
Python
|
mit
|
storborg/gimlet
|
Add unit tests for Session
|
from unittest import TestCase
from webob import Request
from gimlet.factories import session_factory_factory
class TestSession(TestCase):
def _make_session(self, secret='secret', **options):
request = Request.blank('/')
return session_factory_factory(secret, **options)(request)
def test_session(self):
sess = self._make_session()
sess['a'] = 'a'
self.assertIn('a', sess)
self.assertIn('a', sess.channels['insecure'])
def test_session_secure_nonperm(self):
sess = self._make_session(secure=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_nonperm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
def test_session_secure_perm(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
def test_session_set_insecure(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess.set('a', 'a', secure=False)
self.assertIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
|
<commit_before><commit_msg>Add unit tests for Session<commit_after>
|
from unittest import TestCase
from webob import Request
from gimlet.factories import session_factory_factory
class TestSession(TestCase):
def _make_session(self, secret='secret', **options):
request = Request.blank('/')
return session_factory_factory(secret, **options)(request)
def test_session(self):
sess = self._make_session()
sess['a'] = 'a'
self.assertIn('a', sess)
self.assertIn('a', sess.channels['insecure'])
def test_session_secure_nonperm(self):
sess = self._make_session(secure=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_nonperm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
def test_session_secure_perm(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
def test_session_set_insecure(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess.set('a', 'a', secure=False)
self.assertIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
|
Add unit tests for Sessionfrom unittest import TestCase
from webob import Request
from gimlet.factories import session_factory_factory
class TestSession(TestCase):
def _make_session(self, secret='secret', **options):
request = Request.blank('/')
return session_factory_factory(secret, **options)(request)
def test_session(self):
sess = self._make_session()
sess['a'] = 'a'
self.assertIn('a', sess)
self.assertIn('a', sess.channels['insecure'])
def test_session_secure_nonperm(self):
sess = self._make_session(secure=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_nonperm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
def test_session_secure_perm(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
def test_session_set_insecure(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess.set('a', 'a', secure=False)
self.assertIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
|
<commit_before><commit_msg>Add unit tests for Session<commit_after>from unittest import TestCase
from webob import Request
from gimlet.factories import session_factory_factory
class TestSession(TestCase):
def _make_session(self, secret='secret', **options):
request = Request.blank('/')
return session_factory_factory(secret, **options)(request)
def test_session(self):
sess = self._make_session()
sess['a'] = 'a'
self.assertIn('a', sess)
self.assertIn('a', sess.channels['insecure'])
def test_session_secure_nonperm(self):
sess = self._make_session(secure=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_nonperm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
def test_session_secure_perm(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess['a'] = 'a'
self.assertIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
def test_session_set_insecure(self):
sess = self._make_session(secure=True, permanent=True, fake_https=True)
sess.set('a', 'a', secure=False)
self.assertIn('a', sess.channels['insecure'])
self.assertNotIn('a', sess.channels['secure_perm'])
self.assertNotIn('a', sess.channels['secure_nonperm'])
|
|
220497316492ea2153cb86e54d2c92606f5ab4cb
|
epochTime.py
|
epochTime.py
|
import time

# Convert the human-readable timestamp in the third CSV column to epoch seconds.
# Note: time.mktime interprets the parsed struct_time as local time, and the
# fractional seconds matched by %f are discarded by struct_time.
with open('Hashes-17.csv') as data:
    for lines in data:
        splitLines = lines.split(',')
        print splitLines[2]
        epochTime = time.mktime(time.strptime(splitLines[2], "%Y-%m-%d %H:%M:%S.%f"))
        print epochTime
        print lines
|
Convert human readable time to epoch.
|
Convert human readable time to epoch.
Time utility
|
Python
|
mit
|
inimitableharish/bitsandpieces
|
Convert human readable time to epoch.
Time utility
|
import time

# Convert the human-readable timestamp in the third CSV column to epoch seconds.
# Note: time.mktime interprets the parsed struct_time as local time, and the
# fractional seconds matched by %f are discarded by struct_time.
with open('Hashes-17.csv') as data:
    for lines in data:
        splitLines = lines.split(',')
        print splitLines[2]
        epochTime = time.mktime(time.strptime(splitLines[2], "%Y-%m-%d %H:%M:%S.%f"))
        print epochTime
        print lines
|
<commit_before><commit_msg>Convert human readable time to epoch.
Time utility<commit_after>
|
import time

# Convert the human-readable timestamp in the third CSV column to epoch seconds.
# Note: time.mktime interprets the parsed struct_time as local time, and the
# fractional seconds matched by %f are discarded by struct_time.
with open('Hashes-17.csv') as data:
    for lines in data:
        splitLines = lines.split(',')
        print splitLines[2]
        epochTime = time.mktime(time.strptime(splitLines[2], "%Y-%m-%d %H:%M:%S.%f"))
        print epochTime
        print lines
|
Convert human readable time to epoch.
Time utility
import time

# Convert the human-readable timestamp in the third CSV column to epoch seconds.
# Note: time.mktime interprets the parsed struct_time as local time, and the
# fractional seconds matched by %f are discarded by struct_time.
with open('Hashes-17.csv') as data:
    for lines in data:
        splitLines = lines.split(',')
        print splitLines[2]
        epochTime = time.mktime(time.strptime(splitLines[2], "%Y-%m-%d %H:%M:%S.%f"))
        print epochTime
        print lines
|
<commit_before><commit_msg>Convert human readable time to epoch.
Time utility<commit_after>
import time

# Convert the human-readable timestamp in the third CSV column to epoch seconds.
# Note: time.mktime interprets the parsed struct_time as local time, and the
# fractional seconds matched by %f are discarded by struct_time.
with open('Hashes-17.csv') as data:
    for lines in data:
        splitLines = lines.split(',')
        print splitLines[2]
        epochTime = time.mktime(time.strptime(splitLines[2], "%Y-%m-%d %H:%M:%S.%f"))
        print epochTime
        print lines
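
A hedged aside: on Python 3 the same conversion can keep the sub-second precision that struct_time drops, using datetime (same assumed CSV layout as above):

from datetime import datetime

with open('Hashes-17.csv') as data:
    for line in data:
        ts = line.split(',')[2]
        # a naive datetime's .timestamp() assumes local time, matching time.mktime
        print(datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f').timestamp())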
|
|
b465435a8aa3be325875a41d196288349b59359f
|
src/mmw/apps/modeling/migrations/0024_fix_gwlfe_gis_data.py
|
src/mmw/apps/modeling/migrations/0024_fix_gwlfe_gis_data.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_gis_data_serialization(apps, schema_editor):
"""
Release 1.20.0 introduced a change which let the project "gis_data"
field get updated by modifications on a scenario. This effectively
meant that modifications were being applied to all scenarios and that
removing them did not actually remove their effect from the gwlf-e
input. For projects that were created and suffered from that bug,
clearing out the gis_data on Project and the results on Scenario
will force them to be recomputed with the fix applied.
"""
Project = apps.get_model('modeling', 'Project')
bug_released_date = '2017-10-17'
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
model_package='gwlfe'):
project.gis_data = None
for scenario in project.scenarios.all():
scenario.results = None
scenario.save()
project.save()
class Migration(migrations.Migration):
dependencies = [
('modeling', '0023_fix_gis_data_serialization'),
]
operations = [
migrations.RunPython(fix_gis_data_serialization,
migrations.RunPython.noop)
]
|
Reset models affected by modification bug
|
Reset models affected by modification bug
Resets the possibly affected multi-year model results by removing the
calculated GIS parts, forcing a recomputation when accessed again.
|
Python
|
apache-2.0
|
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
|
Reset models affected by modification bug
Resets the possibly affected multi-year model results by removing the
calculated GIS parts, forcing a recomputation when accessed again.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_gis_data_serialization(apps, schema_editor):
"""
Release 1.20.0 introduced a change which let the project "gis_data"
field get updated by modifications on a scenario. This effectively
meant that modifications were being applied to all scenarios and that
removing them did not actually remove their effect from the gwlf-e
input. For projects that were created and suffered from that bug,
clearing out the gis_data on Project and the results on Scenario
will force them to be recomputed with the fix applied.
"""
Project = apps.get_model('modeling', 'Project')
bug_released_date = '2017-10-17'
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
model_package='gwlfe'):
project.gis_data = None
for scenario in project.scenarios.all():
scenario.results = None
scenario.save()
project.save()
class Migration(migrations.Migration):
dependencies = [
('modeling', '0023_fix_gis_data_serialization'),
]
operations = [
migrations.RunPython(fix_gis_data_serialization,
migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Reset models affected by modification bug
Resets the possibly affected multi-year model results by removing the
calculated GIS parts, forcing a recomputation when accessed again.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_gis_data_serialization(apps, schema_editor):
"""
Release 1.20.0 introduced a change which let the project "gis_data"
field get updated by modifications on a scenario. This effectively
meant that modifications were being applied to all scenarios and that
removing them did not actually remove their effect from the gwlf-e
input. For projects that were created and suffered from that bug,
clearing out the gis_data on Project and the results on Scenario
will force them to be recomputed with the fix applied.
"""
Project = apps.get_model('modeling', 'Project')
bug_released_date = '2017-10-17'
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
model_package='gwlfe'):
project.gis_data = None
for scenario in project.scenarios.all():
scenario.results = None
scenario.save()
project.save()
class Migration(migrations.Migration):
dependencies = [
('modeling', '0023_fix_gis_data_serialization'),
]
operations = [
migrations.RunPython(fix_gis_data_serialization,
migrations.RunPython.noop)
]
|
Reset models affected by modification bug
Resets the possibly affected multi-year model results by removing the
calculated GIS parts, forcing a recomputation when accessed again.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_gis_data_serialization(apps, schema_editor):
"""
Release 1.20.0 introduced a change which let the project "gis_data"
field get updated by modifications on a scenario. This effectively
meant that modifications were being applied to all scenarios and that
removing them did not actually remove their effect from the gwlf-e
input. For projects that were created and suffered from that bug,
clearing out the gis_data on Project and the results on Scenario
will force them to be recomputed with the fix applied.
"""
Project = apps.get_model('modeling', 'Project')
bug_released_date = '2017-10-17'
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
model_package='gwlfe'):
project.gis_data = None
for scenario in project.scenarios.all():
scenario.results = None
scenario.save()
project.save()
class Migration(migrations.Migration):
dependencies = [
('modeling', '0023_fix_gis_data_serialization'),
]
operations = [
migrations.RunPython(fix_gis_data_serialization,
migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Reset models affected by modification bug
Resets the possibly affected multi-year model results by removing the
calculated GIS parts, forcing a recomputation when accessed again.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_gis_data_serialization(apps, schema_editor):
"""
Release 1.20.0 introduced a change which let the project "gis_data"
field get updated by modifications on a scenario. This effectively
meant that modifications were being applied to all scenarios and that
removing them did not actually remove their effect from the gwlf-e
input. For projects that were created and suffered from that bug,
clearing out the gis_data on Project and the results on Scenario
will force them to be recomputed with the fix applied.
"""
Project = apps.get_model('modeling', 'Project')
bug_released_date = '2017-10-17'
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
model_package='gwlfe'):
project.gis_data = None
for scenario in project.scenarios.all():
scenario.results = None
scenario.save()
project.save()
class Migration(migrations.Migration):
dependencies = [
('modeling', '0023_fix_gis_data_serialization'),
]
operations = [
migrations.RunPython(fix_gis_data_serialization,
migrations.RunPython.noop)
]
|
|
7df2ef1e850cefd5fcbd62d1dd27bae1bef975a1
|
skimage/io/tests/test_image.py
|
skimage/io/tests/test_image.py
|
from skimage.io import Image
from numpy.testing import assert_equal, assert_array_equal
def test_tags():
f = Image([1, 2, 3], foo='bar', sigma='delta')
g = Image([3, 2, 1], sun='moon')
h = Image([1, 1, 1])
assert_equal(f.tags['foo'], 'bar')
assert_array_equal((g + 2).tags['sun'], 'moon')
assert_equal(h.tags, {})
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
|
Add tests for image tags.
|
Add tests for image tags.
|
Python
|
bsd-3-clause
|
pratapvardhan/scikit-image,emon10005/scikit-image,SamHames/scikit-image,chintak/scikit-image,michaelpacer/scikit-image,oew1v07/scikit-image,michaelaye/scikit-image,rjeli/scikit-image,vighneshbirodkar/scikit-image,Midafi/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,newville/scikit-image,warmspringwinds/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,jwiggins/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,chintak/scikit-image,michaelpacer/scikit-image,robintw/scikit-image,emon10005/scikit-image,paalge/scikit-image,dpshelio/scikit-image,ajaybhat/scikit-image,WarrenWeckesser/scikits-image,SamHames/scikit-image,GaZ3ll3/scikit-image,juliusbierk/scikit-image,youprofit/scikit-image,blink1073/scikit-image,SamHames/scikit-image,juliusbierk/scikit-image,keflavich/scikit-image,jwiggins/scikit-image,ClinicalGraphics/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,almarklein/scikit-image,chriscrosscutler/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,pratapvardhan/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,robintw/scikit-image,Britefury/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,oew1v07/scikit-image,almarklein/scikit-image,chintak/scikit-image,keflavich/scikit-image,Hiyorimi/scikit-image,WarrenWeckesser/scikits-image,bsipocz/scikit-image,Britefury/scikit-image,bennlich/scikit-image,newville/scikit-image,youprofit/scikit-image,paalge/scikit-image,blink1073/scikit-image,bennlich/scikit-image,vighneshbirodkar/scikit-image
|
Add tests for image tags.
|
from skimage.io import Image
from numpy.testing import assert_equal, assert_array_equal
def test_tags():
f = Image([1, 2, 3], foo='bar', sigma='delta')
g = Image([3, 2, 1], sun='moon')
h = Image([1, 1, 1])
assert_equal(f.tags['foo'], 'bar')
assert_array_equal((g + 2).tags['sun'], 'moon')
assert_equal(h.tags, {})
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
|
<commit_before><commit_msg>Add tests for image tags.<commit_after>
|
from skimage.io import Image
from numpy.testing import assert_equal, assert_array_equal
def test_tags():
f = Image([1, 2, 3], foo='bar', sigma='delta')
g = Image([3, 2, 1], sun='moon')
h = Image([1, 1, 1])
assert_equal(f.tags['foo'], 'bar')
assert_array_equal((g + 2).tags['sun'], 'moon')
assert_equal(h.tags, {})
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
|
Add tests for image tags.from skimage.io import Image
from numpy.testing import assert_equal, assert_array_equal
def test_tags():
f = Image([1, 2, 3], foo='bar', sigma='delta')
g = Image([3, 2, 1], sun='moon')
h = Image([1, 1, 1])
assert_equal(f.tags['foo'], 'bar')
assert_array_equal((g + 2).tags['sun'], 'moon')
assert_equal(h.tags, {})
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
|
<commit_before><commit_msg>Add tests for image tags.<commit_after>from skimage.io import Image
from numpy.testing import assert_equal, assert_array_equal
def test_tags():
f = Image([1, 2, 3], foo='bar', sigma='delta')
g = Image([3, 2, 1], sun='moon')
h = Image([1, 1, 1])
assert_equal(f.tags['foo'], 'bar')
assert_array_equal((g + 2).tags['sun'], 'moon')
assert_equal(h.tags, {})
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
|
|
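Aside on the record above: the tests pin down an Image type whose tags dict must survive arithmetic like f + 2. Purely to make that contract concrete, here is a minimal ndarray-subclass sketch; TaggedImage is a made-up name, not skimage's actual implementation, which lives in skimage.io.

import numpy as np

class TaggedImage(np.ndarray):
    """ndarray that carries a free-form ``tags`` dict through views and ufuncs."""
    def __new__(cls, arr, **tags):
        obj = np.asarray(arr).view(cls)
        obj.tags = tags
        return obj
    def __array_finalize__(self, obj):
        # Runs for views and ufunc outputs; inherit the source's tags.
        self.tags = getattr(obj, 'tags', {})

f = TaggedImage([1, 2, 3], foo='bar')
assert f.tags['foo'] == 'bar'
assert (f + 2).tags['foo'] == 'bar'       # tags survive arithmetic
assert TaggedImage([1, 1, 1]).tags == {}  # untagged case, as in the test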
2fc2a07e219a60fb62edadeed00220c4b922061f
|
src/nodeconductor_saltstack/migrations/0003_add_error_message.py
|
src/nodeconductor_saltstack/migrations/0003_add_error_message.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('nodeconductor_saltstack', '0002_paid_resources'),
]
operations = [
migrations.AddField(
model_name='saltstackserviceprojectlink',
name='error_message',
field=models.TextField(blank=True),
preserve_default=True,
),
]
|
Add migration for error_message field (NC-909)
|
Add migration for error_message field (NC-909)
|
Python
|
mit
|
opennode/nodeconductor-saltstack
|
Add migration for error_message field (NC-909)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('nodeconductor_saltstack', '0002_paid_resources'),
]
operations = [
migrations.AddField(
model_name='saltstackserviceprojectlink',
name='error_message',
field=models.TextField(blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration for error_message field (NC-909)<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('nodeconductor_saltstack', '0002_paid_resources'),
]
operations = [
migrations.AddField(
model_name='saltstackserviceprojectlink',
name='error_message',
field=models.TextField(blank=True),
preserve_default=True,
),
]
|
Add migration for error_message field (NC-909)# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('nodeconductor_saltstack', '0002_paid_resources'),
]
operations = [
migrations.AddField(
model_name='saltstackserviceprojectlink',
name='error_message',
field=models.TextField(blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration for error_message field (NC-909)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('nodeconductor_saltstack', '0002_paid_resources'),
]
operations = [
migrations.AddField(
model_name='saltstackserviceprojectlink',
name='error_message',
field=models.TextField(blank=True),
preserve_default=True,
),
]
|
|
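For readers skimming these migration records: the single operation above can be instantiated and inspected on its own, without a configured Django project. Model and field names below are taken from the record; the printed description is whatever Django's Operation.describe() produces, and its exact wording may vary across Django versions.

from django.db import migrations, models

op = migrations.AddField(
    model_name='saltstackserviceprojectlink',
    name='error_message',
    field=models.TextField(blank=True),  # blank=True: optional in forms, stored as '' rather than NULL
)
print(op.describe())  # roughly: Add field error_message to saltstackserviceprojectlink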
abbca8065b92cd652c2d4ddab46ab4bf574e5ccf
|
decimaltobinary.py
|
decimaltobinary.py
|
from stack import Stack
def convertToBinary(value):
remainder = Stack()
while value > 0:
rem = value % 2
remainder.push(rem)
value = value // 2
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
def myconvertTobinary(value):
remainder = Stack()
while value > 0:
rem = value % 8
remainder.push(rem)
value = value // 8
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
print convertToBinary(43)
print myconvertTobinary(25)
|
Add python script to convert decimal value to binary
|
Add python script to convert decimal value to binary
|
Python
|
mit
|
prabhugs/scripts,prabhugs/scripts
|
Add python script to convert decimal value to binary
|
from stack import Stack
def convertToBinary(value):
remainder = Stack()
while value > 0:
rem = value % 2
remainder.push(rem)
value = value // 2
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
def myconvertTobinary(value):
remainder = Stack()
while value > 0:
rem = value % 8
remainder.push(rem)
value = value // 8
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
print convertToBinary(43)
print myconvertTobinary(25)
|
<commit_before><commit_msg>Add python script to convert decimal value to binary<commit_after>
|
from stack import Stack
def convertToBinary(value):
remainder = Stack()
while value > 0:
rem = value % 2
remainder.push(rem)
value = value // 2
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
def myconvertTobinary(value):
remainder = Stack()
while value > 0:
rem = value % 8
remainder.push(rem)
value = value // 8
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
print convertToBinary(43)
print myconvertTobinary(25)
|
Add python script to convert decimal value to binaryfrom stack import Stack
def convertToBinary(value):
remainder = Stack()
while value > 0:
rem = value % 2
remainder.push(rem)
value = value // 2
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
def myconvertTobinary(value):
remainder = Stack()
while value > 0:
rem = value % 8
remainder.push(rem)
value = value // 8
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
print convertToBinary(43)
print myconvertTobinary(25)
|
<commit_before><commit_msg>Add python script to convert decimal value to binary<commit_after>from stack import Stack
def convertToBinary(value):
remainder = Stack()
while value > 0:
rem = value % 2
remainder.push(rem)
value = value // 2
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
def myconvertTobinary(value):
remainder = Stack()
while value > 0:
rem = value % 8
remainder.push(rem)
value = value // 8
binary = ''
while not remainder.isEmpty():
binary = binary + str(remainder.pop())
return binary
print convertToBinary(43)
print myconvertTobinary(25)
|
|
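Two observations on decimaltobinary.py as recorded: it is Python 2 (bare print statements), and myconvertTobinary divides by 8, so despite its name and its binary variable it produces octal digits; both functions also return '' for an input of 0. A generalized sketch covering any base and the zero case (to_base is a hypothetical helper, not part of the record):

def to_base(value, base=2):
    # Collect remainders least-significant first, then reverse.
    digits = []
    while value > 0:
        digits.append(str(value % base))
        value //= base
    return ''.join(reversed(digits)) or '0'

assert to_base(43, 2) == '101011'  # matches convertToBinary(43)
assert to_base(25, 8) == '31'      # what myconvertTobinary(25) really computes
assert to_base(0) == '0'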
d0ea82dd5a4f36ec374357276093e1a91d85e54f
|
examples/basics/visuals/bezier.py
|
examples/basics/visuals/bezier.py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to draw curved lines (bezier).
"""
import sys
from vispy import app, gloo, visuals
from vispy.geometry import curves
from vispy.visuals.transforms import STTransform, NullTransform
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, title='Bezier lines example',
keys='interactive', size=(400, 750))
self.lines = [
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(50, -190),
(350, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(190, -190),
(210, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve3_bezier(
(10, 0),
(30, 200),
(390, 0)
), color='w', width=2, method='agg')
]
# Translate each line visual downwards
for i, line in enumerate(self.lines):
x = 0
y = 200 * (i + 1)
line.transform = STTransform(translate=[x, y])
self.texts = [
visuals.TextVisual('4 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 75)),
visuals.TextVisual('3 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 525)),
]
for text in self.texts:
text.transform = NullTransform()
# Initialize transform systems for each visual
self.visuals = self.lines + self.texts
for visual in self.visuals:
visual.tr_sys = visuals.transforms.TransformSystem(self)
visual.tr_sys.visual_to_document = visual.transform
self.show()
def on_draw(self, event):
gloo.clear('black')
gloo.set_viewport(0, 0, *self.physical_size)
for visual in self.visuals:
visual.draw(visual.tr_sys)
if __name__ == '__main__':
win = Canvas()
if sys.flags.interactive != 1:
app.run()
|
Add an example how to draw Bezier curves.
|
Add an example how to draw Bezier curves.
The vispy.geometry.curves module provides several helper
functions to generate the right vertices for a nice curved
line.
|
Python
|
bsd-3-clause
|
sbtlaarzc/vispy,QuLogic/vispy,drufat/vispy,kkuunnddaannkk/vispy,dchilds7/Deysha-Star-Formation,dchilds7/Deysha-Star-Formation,jay3sh/vispy,jdreaver/vispy,RebeccaWPerry/vispy,RebeccaWPerry/vispy,srinathv/vispy,julienr/vispy,inclement/vispy,Eric89GXL/vispy,bollu/vispy,QuLogic/vispy,jay3sh/vispy,QuLogic/vispy,srinathv/vispy,jdreaver/vispy,bollu/vispy,srinathv/vispy,ghisvail/vispy,inclement/vispy,ghisvail/vispy,michaelaye/vispy,bollu/vispy,dchilds7/Deysha-Star-Formation,kkuunnddaannkk/vispy,kkuunnddaannkk/vispy,jay3sh/vispy,michaelaye/vispy,drufat/vispy,michaelaye/vispy,ghisvail/vispy,inclement/vispy,sbtlaarzc/vispy,sbtlaarzc/vispy,Eric89GXL/vispy,julienr/vispy,julienr/vispy,drufat/vispy,jdreaver/vispy,Eric89GXL/vispy,RebeccaWPerry/vispy
|
Add an example how to draw Bezier curves.
The vispy.geometry.curves module provides several helper
functions to generate the right vertices for a nice curved
line.
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to draw curved lines (bezier).
"""
import sys
from vispy import app, gloo, visuals
from vispy.geometry import curves
from vispy.visuals.transforms import STTransform, NullTransform
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, title='Bezier lines example',
keys='interactive', size=(400, 750))
self.lines = [
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(50, -190),
(350, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(190, -190),
(210, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve3_bezier(
(10, 0),
(30, 200),
(390, 0)
), color='w', width=2, method='agg')
]
# Translate each line visual downwards
for i, line in enumerate(self.lines):
x = 0
y = 200 * (i + 1)
line.transform = STTransform(translate=[x, y])
self.texts = [
visuals.TextVisual('4 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 75)),
visuals.TextVisual('3 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 525)),
]
for text in self.texts:
text.transform = NullTransform()
# Initialize transform systems for each visual
self.visuals = self.lines + self.texts
for visual in self.visuals:
visual.tr_sys = visuals.transforms.TransformSystem(self)
visual.tr_sys.visual_to_document = visual.transform
self.show()
def on_draw(self, event):
gloo.clear('black')
gloo.set_viewport(0, 0, *self.physical_size)
for visual in self.visuals:
visual.draw(visual.tr_sys)
if __name__ == '__main__':
win = Canvas()
if sys.flags.interactive != 1:
app.run()
|
<commit_before><commit_msg>Add an example how to draw Bezier curves.
The vispy.geometry.curves module provides several helper
functions to generate the right vertices for a nice curved
line.<commit_after>
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to draw curved lines (bezier).
"""
import sys
from vispy import app, gloo, visuals
from vispy.geometry import curves
from vispy.visuals.transforms import STTransform, NullTransform
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, title='Bezier lines example',
keys='interactive', size=(400, 750))
self.lines = [
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(50, -190),
(350, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(190, -190),
(210, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve3_bezier(
(10, 0),
(30, 200),
(390, 0)
), color='w', width=2, method='agg')
]
# Translate each line visual downwards
for i, line in enumerate(self.lines):
x = 0
y = 200 * (i + 1)
line.transform = STTransform(translate=[x, y])
self.texts = [
visuals.TextVisual('4 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 75)),
visuals.TextVisual('3 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 525)),
]
for text in self.texts:
text.transform = NullTransform()
# Initialize transform systems for each visual
self.visuals = self.lines + self.texts
for visual in self.visuals:
visual.tr_sys = visuals.transforms.TransformSystem(self)
visual.tr_sys.visual_to_document = visual.transform
self.show()
def on_draw(self, event):
gloo.clear('black')
gloo.set_viewport(0, 0, *self.physical_size)
for visual in self.visuals:
visual.draw(visual.tr_sys)
if __name__ == '__main__':
win = Canvas()
if sys.flags.interactive != 1:
app.run()
|
Add an example how to draw Bezier curves.
The vispy.geometry.curves module provides several helper
functions to generate the right vertices for a nice curved
line.# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to draw curved lines (bezier).
"""
import sys
from vispy import app, gloo, visuals
from vispy.geometry import curves
from vispy.visuals.transforms import STTransform, NullTransform
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, title='Bezier lines example',
keys='interactive', size=(400, 750))
self.lines = [
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(50, -190),
(350, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(190, -190),
(210, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve3_bezier(
(10, 0),
(30, 200),
(390, 0)
), color='w', width=2, method='agg')
]
# Translate each line visual downwards
for i, line in enumerate(self.lines):
x = 0
y = 200 * (i + 1)
line.transform = STTransform(translate=[x, y])
self.texts = [
visuals.TextVisual('4 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 75)),
visuals.TextVisual('3 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 525)),
]
for text in self.texts:
text.transform = NullTransform()
# Initialize transform systems for each visual
self.visuals = self.lines + self.texts
for visual in self.visuals:
visual.tr_sys = visuals.transforms.TransformSystem(self)
visual.tr_sys.visual_to_document = visual.transform
self.show()
def on_draw(self, event):
gloo.clear('black')
gloo.set_viewport(0, 0, *self.physical_size)
for visual in self.visuals:
visual.draw(visual.tr_sys)
if __name__ == '__main__':
win = Canvas()
if sys.flags.interactive != 1:
app.run()
|
<commit_before><commit_msg>Add an example how to draw Bezier curves.
The vispy.geometry.curves module provides several helper
functions to generate the right vertices for a nice curved
line.<commit_after># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates how to draw curved lines (bezier).
"""
import sys
from vispy import app, gloo, visuals
from vispy.geometry import curves
from vispy.visuals.transforms import STTransform, NullTransform
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, title='Bezier lines example',
keys='interactive', size=(400, 750))
self.lines = [
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(50, -190),
(350, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve4_bezier(
(10, 0),
(190, -190),
(210, 190),
(390, 0)
), color='w', width=2, method='agg'),
visuals.LineVisual(curves.curve3_bezier(
(10, 0),
(30, 200),
(390, 0)
), color='w', width=2, method='agg')
]
# Translate each line visual downwards
for i, line in enumerate(self.lines):
x = 0
y = 200 * (i + 1)
line.transform = STTransform(translate=[x, y])
self.texts = [
visuals.TextVisual('4 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 75)),
visuals.TextVisual('3 point Bezier curve', bold=True, color='w',
font_size=24, pos=(200, 525)),
]
for text in self.texts:
text.transform = NullTransform()
# Initialize transform systems for each visual
self.visuals = self.lines + self.texts
for visual in self.visuals:
visual.tr_sys = visuals.transforms.TransformSystem(self)
visual.tr_sys.visual_to_document = visual.transform
self.show()
def on_draw(self, event):
gloo.clear('black')
gloo.set_viewport(0, 0, *self.physical_size)
for visual in self.visuals:
visual.draw(visual.tr_sys)
if __name__ == '__main__':
win = Canvas()
if sys.flags.interactive != 1:
app.run()
|
|
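For anyone reading this record without vispy at hand, the math behind curve3_bezier is the standard quadratic Bezier B(t) = (1-t)^2 P0 + 2(1-t)t P1 + t^2 P2. The sketch below samples it uniformly; my understanding is that vispy's curve helpers subdivide adaptively instead, so treat this only as the underlying formula.

import numpy as np

def quadratic_bezier(p0, p1, p2, n=50):
    t = np.linspace(0.0, 1.0, n)[:, None]          # column of curve parameters
    p0, p1, p2 = map(np.asarray, (p0, p1, p2))
    return (1 - t) ** 2 * p0 + 2 * (1 - t) * t * p1 + t ** 2 * p2

pts = quadratic_bezier((10, 0), (30, 200), (390, 0))  # the record's 3-point curve
assert pts.shape == (50, 2)
assert np.allclose(pts[0], (10, 0)) and np.allclose(pts[-1], (390, 0))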
b639aebe9689b09e879cefa3a4ff2d6bc2f1c5f1
|
sandbox/load_modules.py
|
sandbox/load_modules.py
|
import os
import glob
position = p.copy()
for module_name in position.b.split():
fileNode = position.insertAsLastChild()
fileNode.h = '@clean {0}.py'.format(module_name)
c.redraw_now()
"""
DependencyNodeUI
ComputeNodeUI
RandomUI
ExpressionUI
BoxUI
ReferenceUI
BackdropUI
DotUI
SubGraphUI
SwitchUI
ContextVariablesUI
TimeWarpUI
LoopUI
AnimationUI
"""
|
Load modules under a package directory
|
Load modules under a package directory
|
Python
|
mit
|
satishgoda/leo-editor-tutorial
|
Load modules under a package directory
|
import os
import glob
position = p.copy()
for module_name in position.b.split():
fileNode = position.insertAsLastChild()
fileNode.h = '@clean {0}.py'.format(module_name)
c.redraw_now()
"""
DependencyNodeUI
ComputeNodeUI
RandomUI
ExpressionUI
BoxUI
ReferenceUI
BackdropUI
DotUI
SubGraphUI
SwitchUI
ContextVariablesUI
TimeWarpUI
LoopUI
AnimationUI
"""
|
<commit_before><commit_msg>Load modules under a package directory<commit_after>
|
import os
import glob
position = p.copy()
for module_name in position.b.split():
fileNode = position.insertAsLastChild()
fileNode.h = '@clean {0}.py'.format(module_name)
c.redraw_now()
"""
DependencyNodeUI
ComputeNodeUI
RandomUI
ExpressionUI
BoxUI
ReferenceUI
BackdropUI
DotUI
SubGraphUI
SwitchUI
ContextVariablesUI
TimeWarpUI
LoopUI
AnimationUI
"""
|
Load modules under a package directoryimport os
import glob
position = p.copy()
for module_name in position.b.split():
fileNode = position.insertAsLastChild()
fileNode.h = '@clean {0}.py'.format(module_name)
c.redraw_now()
"""
DependencyNodeUI
ComputeNodeUI
RandomUI
ExpressionUI
BoxUI
ReferenceUI
BackdropUI
DotUI
SubGraphUI
SwitchUI
ContextVariablesUI
TimeWarpUI
LoopUI
AnimationUI
"""
|
<commit_before><commit_msg>Load modules under a package directory<commit_after>import os
import glob
position = p.copy()
for module_name in position.b.split():
fileNode = position.insertAsLastChild()
fileNode.h = '@clean {0}.py'.format(module_name)
c.redraw_now()
"""
DependencyNodeUI
ComputeNodeUI
RandomUI
ExpressionUI
BoxUI
ReferenceUI
BackdropUI
DotUI
SubGraphUI
SwitchUI
ContextVariablesUI
TimeWarpUI
LoopUI
AnimationUI
"""
|
|
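Note on load_modules.py as stored: p and c are globals that Leo injects into scripts, the os/glob imports are never used in the snippet, and the module list lives in the node body. Outside Leo, the equivalent discovery step might look like this; package_dir is a made-up path:

import glob
import os

package_dir = './GafferUI'  # hypothetical package directory
modules = sorted(
    os.path.splitext(os.path.basename(path))[0]
    for path in glob.glob(os.path.join(package_dir, '*.py'))
)
for module_name in modules:
    print('@clean {0}.py'.format(module_name))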
ec62e6784b69ecc47106529d27b8fc6dc1ab215f
|
imaginary/test/test_idea.py
|
imaginary/test/test_idea.py
|
"""
Some basic unit tests for L{imaginary.idea} (but many tests for this code are in
other modules instead).
"""
from twisted.trial.unittest import TestCase
from epsilon.structlike import record
from imaginary.idea import Idea, Link, Path
class Named(record('name')):
pass
class PathTests(TestCase):
"""
Tests for L{imaginary.idea.Path}.
"""
def test_repr(self):
"""
A L{Path} instance can be rendered into a string by C{repr}.
"""
monitor = Idea(Named("monitor"))
desk = Idea(Named("desk"))
office = Idea(Named("office"))
path = Path([Link(office, desk), Link(desk, monitor)])
self.assertEquals(
repr(path),
"Path(\n"
"\t'office' => 'desk' []\n"
"\t'desk' => 'monitor' [])")
|
Add a test for Path.__repr__
|
Add a test for Path.__repr__
|
Python
|
mit
|
glyph/imaginary,twisted/imaginary
|
Add a test for Path.__repr__
|
"""
Some basic unit tests for L{imaginary.idea} (but many tests for this code are in
other modules instead).
"""
from twisted.trial.unittest import TestCase
from epsilon.structlike import record
from imaginary.idea import Idea, Link, Path
class Named(record('name')):
pass
class PathTests(TestCase):
"""
Tests for L{imaginary.idea.Path}.
"""
def test_repr(self):
"""
A L{Path} instance can be rendered into a string by C{repr}.
"""
monitor = Idea(Named("monitor"))
desk = Idea(Named("desk"))
office = Idea(Named("office"))
path = Path([Link(office, desk), Link(desk, monitor)])
self.assertEquals(
repr(path),
"Path(\n"
"\t'office' => 'desk' []\n"
"\t'desk' => 'monitor' [])")
|
<commit_before><commit_msg>Add a test for Path.__repr__<commit_after>
|
"""
Some basic unit tests for L{imaginary.idea} (but many tests for this code are in
other modules instead).
"""
from twisted.trial.unittest import TestCase
from epsilon.structlike import record
from imaginary.idea import Idea, Link, Path
class Named(record('name')):
pass
class PathTests(TestCase):
"""
Tests for L{imaginary.idea.Path}.
"""
def test_repr(self):
"""
A L{Path} instance can be rendered into a string by C{repr}.
"""
monitor = Idea(Named("monitor"))
desk = Idea(Named("desk"))
office = Idea(Named("office"))
path = Path([Link(office, desk), Link(desk, monitor)])
self.assertEquals(
repr(path),
"Path(\n"
"\t'office' => 'desk' []\n"
"\t'desk' => 'monitor' [])")
|
Add a test for Path.__repr__
"""
Some basic unit tests for L{imaginary.idea} (but many tests for this code are in
other modules instead).
"""
from twisted.trial.unittest import TestCase
from epsilon.structlike import record
from imaginary.idea import Idea, Link, Path
class Named(record('name')):
pass
class PathTests(TestCase):
"""
Tests for L{imaginary.idea.Path}.
"""
def test_repr(self):
"""
A L{Path} instance can be rendered into a string by C{repr}.
"""
monitor = Idea(Named("monitor"))
desk = Idea(Named("desk"))
office = Idea(Named("office"))
path = Path([Link(office, desk), Link(desk, monitor)])
self.assertEquals(
repr(path),
"Path(\n"
"\t'office' => 'desk' []\n"
"\t'desk' => 'monitor' [])")
|
<commit_before><commit_msg>Add a test for Path.__repr__<commit_after>
"""
Some basic unit tests for L{imaginary.idea} (but many tests for this code are in
other modules instead).
"""
from twisted.trial.unittest import TestCase
from epsilon.structlike import record
from imaginary.idea import Idea, Link, Path
class Named(record('name')):
pass
class PathTests(TestCase):
"""
Tests for L{imaginary.idea.Path}.
"""
def test_repr(self):
"""
A L{Path} instance can be rendered into a string by C{repr}.
"""
monitor = Idea(Named("monitor"))
desk = Idea(Named("desk"))
office = Idea(Named("office"))
path = Path([Link(office, desk), Link(desk, monitor)])
self.assertEquals(
repr(path),
"Path(\n"
"\t'office' => 'desk' []\n"
"\t'desk' => 'monitor' [])")
|
|
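To make the repr contract in this test self-contained, here is a freestanding sketch that reproduces the expected string. Idea, Link, Path and Named below are stand-ins, not imaginary's real classes, and the trailing [] presumably renders per-link annotations, empty here.

class Named:
    def __init__(self, name):
        self.name = name

class Idea:
    def __init__(self, delegate):
        self.delegate = delegate

class Link:
    def __init__(self, source, target):
        self.source, self.target = source, target

class Path:
    def __init__(self, links):
        self.links = links
    def __repr__(self):
        lines = ['\t%r => %r []' % (l.source.delegate.name, l.target.delegate.name)
                 for l in self.links]
        return 'Path(\n' + '\n'.join(lines) + ')'

office, desk, monitor = (Idea(Named(n)) for n in ('office', 'desk', 'monitor'))
assert repr(Path([Link(office, desk), Link(desk, monitor)])) == (
    "Path(\n\t'office' => 'desk' []\n\t'desk' => 'monitor' [])")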
5af7dcb774cb52ffe09822d788d7beebd34a6ef6
|
watcher.py
|
watcher.py
|
import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
class ScriptModifiedHandler(PatternMatchingEventHandler):
patterns = ['*.py']
def __init__(self):
super(ScriptModifiedHandler, self).__init__()
# you can add some init code here
def process(self, event):
print(event.src_path, event.event_type)
def on_modified(self, event):
self.process(event)
def on_moved(self, event):
pass
def on_deleted(self, event):
pass
def on_created(self, event):
pass
if __name__ == '__main__':
observer = Observer()
path = '.'
event_handler = ScriptModifiedHandler()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
Add watchdog that monitors scripts editing
|
Add watchdog that monitors scripts editing
|
Python
|
mit
|
duboviy/misc
|
Add watchdog that monitors scripts editing
|
import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
class ScriptModifiedHandler(PatternMatchingEventHandler):
patterns = ['*.py']
def __init__(self):
super(ScriptModifiedHandler, self).__init__()
# you can add some init code here
def process(self, event):
print(event.src_path, event.event_type)
def on_modified(self, event):
self.process(event)
def on_moved(self, event):
pass
def on_deleted(self, event):
pass
def on_created(self, event):
pass
if __name__ == '__main__':
observer = Observer()
path = '.'
event_handler = ScriptModifiedHandler()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
<commit_before><commit_msg>Add watchdog that monitors scripts editing<commit_after>
|
import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
class ScriptModifiedHandler(PatternMatchingEventHandler):
patterns = ['*.py']
def __init__(self):
super(ScriptModifiedHandler, self).__init__()
# you can add some init code here
def process(self, event):
print(event.src_path, event.event_type)
def on_modified(self, event):
self.process(event)
def on_moved(self, event):
pass
def on_deleted(self, event):
pass
def on_created(self, event):
pass
if __name__ == '__main__':
observer = Observer()
path = '.'
event_handler = ScriptModifiedHandler()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
Add watchdog that monitors scripts editingimport time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
class ScriptModifiedHandler(PatternMatchingEventHandler):
patterns = ['*.py']
def __init__(self):
super(ScriptModifiedHandler, self).__init__()
# you can add some init code here
def process(self, event):
print(event.src_path, event.event_type)
def on_modified(self, event):
self.process(event)
def on_moved(self, event):
pass
def on_deleted(self, event):
pass
def on_created(self, event):
pass
if __name__ == '__main__':
observer = Observer()
path = '.'
event_handler = ScriptModifiedHandler()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
<commit_before><commit_msg>Add watchdog that monitors scripts editing<commit_after>import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
class ScriptModifiedHandler(PatternMatchingEventHandler):
patterns = ['*.py']
def __init__(self):
super(ScriptModifiedHandler, self).__init__()
# you can add some init code here
def process(self, event):
print(event.src_path, event.event_type)
def on_modified(self, event):
self.process(event)
def on_moved(self, event):
pass
def on_deleted(self, event):
pass
def on_created(self, event):
pass
if __name__ == '__main__':
observer = Observer()
path = '.'
event_handler = ScriptModifiedHandler()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
|
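A quick way to exercise the handler in this record without starting an Observer thread: construct an event and call dispatch(), which applies the pattern filter before routing to on_modified. FileModifiedEvent is part of watchdog's public API; the paths are made up, and dispatch internals have shifted slightly between watchdog versions.

from watchdog.events import FileModifiedEvent, PatternMatchingEventHandler

class ScriptModifiedHandler(PatternMatchingEventHandler):
    patterns = ['*.py']
    def on_modified(self, event):
        print(event.src_path, event.event_type)

handler = ScriptModifiedHandler()
handler.dispatch(FileModifiedEvent('./demo.py'))    # matches *.py, prints
handler.dispatch(FileModifiedEvent('./notes.txt'))  # filtered out by patterns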
4b9ee24e05b0ac73c7ca9fa17d50d11b1c8a95df
|
test/5232/fileinto_test.py
|
test/5232/fileinto_test.py
|
import sys
sys.path.append('./')
import unittest
import checksieve
class TestFileinto(unittest.TestCase):
def test_fileinto_with_flags(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "\\\\Seen" "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_list(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags ["\\\\Seen", "\\\\Deleted"] "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_without_arguments(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
def test_fileinto_with_flag_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags "\\\\Seen" :copy "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flag_without_arguments_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags :copy "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
if __name__ == '__main__':
unittest.main()
|
Add failing tests for fileinto command with :flags option
|
Add failing tests for fileinto command with :flags option
|
Python
|
mit
|
dburkart/check-sieve,dburkart/check-sieve,dburkart/check-sieve,dburkart/mail-sieve-verifier,dburkart/mail-sieve-verifier
|
Add failing tests for fileinto command with :flags option
|
import sys
sys.path.append('./')
import unittest
import checksieve
class TestFileinto(unittest.TestCase):
def test_fileinto_with_flags(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "\\\\Seen" "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_list(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags ["\\\\Seen", "\\\\Deleted"] "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_without_arguments(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
def test_fileinto_with_flag_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags "\\\\Seen" :copy "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flag_without_arguments_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags :copy "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add failing tests for fileinto command with :flags option<commit_after>
|
import sys
sys.path.append('./')
import unittest
import checksieve
class TestFileinto(unittest.TestCase):
def test_fileinto_with_flags(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "\\\\Seen" "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_list(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags ["\\\\Seen", "\\\\Deleted"] "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_without_arguments(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
def test_fileinto_with_flag_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags "\\\\Seen" :copy "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flag_without_arguments_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags :copy "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
if __name__ == '__main__':
unittest.main()
|
Add failing tests for fileinto command with :flags optionimport sys
sys.path.append('./')
import unittest
import checksieve
class TestFileinto(unittest.TestCase):
def test_fileinto_with_flags(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "\\\\Seen" "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_list(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags ["\\\\Seen", "\\\\Deleted"] "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_without_arguments(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
def test_fileinto_with_flag_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags "\\\\Seen" :copy "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flag_without_arguments_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags :copy "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add failing tests for fileinto command with :flags option<commit_after>import sys
sys.path.append('./')
import unittest
import checksieve
class TestFileinto(unittest.TestCase):
def test_fileinto_with_flags(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "\\\\Seen" "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_list(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags ["\\\\Seen", "\\\\Deleted"] "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flags_without_arguments(self):
sieve = '''
require ["fileinto", "imap4flags"];
fileinto :flags "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
def test_fileinto_with_flag_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags "\\\\Seen" :copy "Mailbox";
'''
self.assertFalse(checksieve.parse_string(sieve, False))
def test_fileinto_with_flag_without_arguments_and_copy(self):
sieve = '''
require ["fileinto", "imap4flags", "copy"];
fileinto :flags :copy "Mailbox";
'''
self.assertTrue(checksieve.parse_string(sieve, True))
if __name__ == '__main__':
unittest.main()
|
|
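Reading the assertions in this record, parse_string(source, expect_failure) appears to return a truthy value exactly when parsing fails, so the scripts where :flags is not followed by a flag argument are the intentionally invalid ones: :flags takes a string or string-list before the mailbox name. A minimal valid/invalid pair for comparison (checksieve is this project's own extension module, so the calls are left commented):

bad = 'require ["fileinto", "imap4flags"];\nfileinto :flags "Mailbox";'
good = 'require ["fileinto", "imap4flags"];\nfileinto :flags "\\\\Seen" "Mailbox";'
# import checksieve
# assert checksieve.parse_string(bad, True)        # error expected
# assert not checksieve.parse_string(good, False)  # should parse cleanly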
ee057041b9be696173ab900ca6f44f72785777dd
|
VehicleDetectionTracking/car_notcar.py
|
VehicleDetectionTracking/car_notcar.py
|
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
import os
#from skimage.feature import hog
#from skimage import color, exposure
# images are divided up into vehicles and non-vehicles
os.chdir('./objects')
# Define a function to return some characteristics of the dataset
def data_look(car_list, notcar_list):
data_dict = {}
# Define a key in data_dict "n_cars" and store the number of car images
data_dict["n_cars"] = len(car_list)
# Define a key "n_notcars" and store the number of notcar images
data_dict["n_notcars"] = len(notcar_list)
# Read in a test image, either car or notcar
test_img = mpimg.imread(car_list[0])
# Define a key "image_shape" and store the test image shape 3-tuple
data_dict["image_shape"] = test_img.shape
# Define a key "data_type" and store the data type of the test image.
data_dict["data_type"] = test_img.dtype
# Return data_dict
return data_dict
def main():
images = glob.glob('*.jpeg')
cars = []
notcars = []
for image in images:
if 'image' in image or 'extra' in image:
notcars.append(image)
else:
cars.append(image)
data_info = data_look(cars, notcars)
# Just for fun choose random car / not-car indices and plot example images
car_ind = np.random.randint(0, len(cars))
notcar_ind = np.random.randint(0, len(notcars))
# Read in car / not-car images
car_image = mpimg.imread(cars[car_ind])
notcar_image = mpimg.imread(notcars[notcar_ind])
# Plot the examples
fig = plt.figure()
plt.subplot(121)
plt.imshow(car_image)
plt.title('Example Car Image')
plt.subplot(122)
plt.imshow(notcar_image)
plt.title('Example Not-car Image')
plt.show()
if __name__ == '__main__':
main()
|
Add scripts which define a function to return some characteristics of the dataset
|
feat: Add scripts which define a function to return some characteristics of the dataset
|
Python
|
mit
|
aguijarro/SelfDrivingCar
|
feat: Add scripts which define a function to return some characteristics of the dataset
|
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
import os
#from skimage.feature import hog
#from skimage import color, exposure
# images are divided up into vehicles and non-vehicles
os.chdir('./objects')
# Define a function to return some characteristics of the dataset
def data_look(car_list, notcar_list):
data_dict = {}
# Define a key in data_dict "n_cars" and store the number of car images
data_dict["n_cars"] = len(car_list)
# Define a key "n_notcars" and store the number of notcar images
data_dict["n_notcars"] = len(notcar_list)
# Read in a test image, either car or notcar
test_img = mpimg.imread(car_list[0])
# Define a key "image_shape" and store the test image shape 3-tuple
data_dict["image_shape"] = test_img.shape
# Define a key "data_type" and store the data type of the test image.
data_dict["data_type"] = test_img.dtype
# Return data_dict
return data_dict
def main():
images = glob.glob('*.jpeg')
cars = []
notcars = []
for image in images:
if 'image' in image or 'extra' in image:
notcars.append(image)
else:
cars.append(image)
data_info = data_look(cars, notcars)
# Just for fun choose random car / not-car indices and plot example images
car_ind = np.random.randint(0, len(cars))
notcar_ind = np.random.randint(0, len(notcars))
# Read in car / not-car images
car_image = mpimg.imread(cars[car_ind])
notcar_image = mpimg.imread(notcars[notcar_ind])
# Plot the examples
fig = plt.figure()
plt.subplot(121)
plt.imshow(car_image)
plt.title('Example Car Image')
plt.subplot(122)
plt.imshow(notcar_image)
plt.title('Example Not-car Image')
plt.show()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>feat: Add scripts which define a function to return some characteristics of the dataset<commit_after>
|
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
import os
#from skimage.feature import hog
#from skimage import color, exposure
# images are divided up into vehicles and non-vehicles
os.chdir('./objects')
# Define a function to return some characteristics of the dataset
def data_look(car_list, notcar_list):
data_dict = {}
# Define a key in data_dict "n_cars" and store the number of car images
data_dict["n_cars"] = len(car_list)
# Define a key "n_notcars" and store the number of notcar images
data_dict["n_notcars"] = len(notcar_list)
# Read in a test image, either car or notcar
test_img = mpimg.imread(car_list[0])
# Define a key "image_shape" and store the test image shape 3-tuple
data_dict["image_shape"] = test_img.shape
# Define a key "data_type" and store the data type of the test image.
data_dict["data_type"] = test_img.dtype
# Return data_dict
return data_dict
def main():
images = glob.glob('*.jpeg')
cars = []
notcars = []
for image in images:
if 'image' in image or 'extra' in image:
notcars.append(image)
else:
cars.append(image)
data_info = data_look(cars, notcars)
# Just for fun choose random car / not-car indices and plot example images
car_ind = np.random.randint(0, len(cars))
notcar_ind = np.random.randint(0, len(notcars))
# Read in car / not-car images
car_image = mpimg.imread(cars[car_ind])
notcar_image = mpimg.imread(notcars[notcar_ind])
# Plot the examples
fig = plt.figure()
plt.subplot(121)
plt.imshow(car_image)
plt.title('Example Car Image')
plt.subplot(122)
plt.imshow(notcar_image)
plt.title('Example Not-car Image')
plt.show()
if __name__ == '__main__':
main()
|
feat: Add scripts which define a function to return some characteristics of the datasetimport matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
import os
#from skimage.feature import hog
#from skimage import color, exposure
# images are divided up into vehicles and non-vehicles
os.chdir('./objects')
# Define a function to return some characteristics of the dataset
def data_look(car_list, notcar_list):
data_dict = {}
# Define a key in data_dict "n_cars" and store the number of car images
data_dict["n_cars"] = len(car_list)
# Define a key "n_notcars" and store the number of notcar images
data_dict["n_notcars"] = len(notcar_list)
# Read in a test image, either car or notcar
test_img = mpimg.imread(car_list[0])
# Define a key "image_shape" and store the test image shape 3-tuple
data_dict["image_shape"] = test_img.shape
# Define a key "data_type" and store the data type of the test image.
data_dict["data_type"] = test_img.dtype
# Return data_dict
return data_dict
def main():
images = glob.glob('*.jpeg')
cars = []
notcars = []
for image in images:
if 'image' in image or 'extra' in image:
notcars.append(image)
else:
cars.append(image)
data_info = data_look(cars, notcars)
# Just for fun choose random car / not-car indices and plot example images
car_ind = np.random.randint(0, len(cars))
notcar_ind = np.random.randint(0, len(notcars))
# Read in car / not-car images
car_image = mpimg.imread(cars[car_ind])
notcar_image = mpimg.imread(notcars[notcar_ind])
# Plot the examples
fig = plt.figure()
plt.subplot(121)
plt.imshow(car_image)
plt.title('Example Car Image')
plt.subplot(122)
plt.imshow(notcar_image)
plt.title('Example Not-car Image')
plt.show()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>feat: Add scripts which define a function to return some characteristics of the dataset<commit_after>import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
import os
#from skimage.feature import hog
#from skimage import color, exposure
# images are divided up into vehicles and non-vehicles
os.chdir('./objects')
# Define a function to return some characteristics of the dataset
def data_look(car_list, notcar_list):
data_dict = {}
# Define a key in data_dict "n_cars" and store the number of car images
data_dict["n_cars"] = len(car_list)
# Define a key "n_notcars" and store the number of notcar images
data_dict["n_notcars"] = len(notcar_list)
# Read in a test image, either car or notcar
test_img = mpimg.imread(car_list[0])
# Define a key "image_shape" and store the test image shape 3-tuple
data_dict["image_shape"] = test_img.shape
# Define a key "data_type" and store the data type of the test image.
data_dict["data_type"] = test_img.dtype
# Return data_dict
return data_dict
def main():
images = glob.glob('*.jpeg')
cars = []
notcars = []
for image in images:
if 'image' in image or 'extra' in image:
notcars.append(image)
else:
cars.append(image)
data_info = data_look(cars, notcars)
# Just for fun choose random car / not-car indices and plot example images
car_ind = np.random.randint(0, len(cars))
notcar_ind = np.random.randint(0, len(notcars))
# Read in car / not-car images
car_image = mpimg.imread(cars[car_ind])
notcar_image = mpimg.imread(notcars[notcar_ind])
# Plot the examples
fig = plt.figure()
plt.subplot(121)
plt.imshow(car_image)
plt.title('Example Car Image')
plt.subplot(122)
plt.imshow(notcar_image)
plt.title('Example Not-car Image')
plt.show()
if __name__ == '__main__':
main()
|
|
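The commented-out skimage imports in this record hint at the pipeline's next step, HOG features. A sketch of that step with skimage; the file path is hypothetical, the parameter values are common choices rather than the project's, and older skimage releases spell the flag visualise instead of visualize.

import matplotlib.image as mpimg
from skimage import color
from skimage.feature import hog

img = mpimg.imread('objects/car1.jpeg')  # hypothetical sample from the dataset
gray = color.rgb2gray(img)
features, hog_image = hog(gray, orientations=9, pixels_per_cell=(8, 8),
                          cells_per_block=(2, 2), visualize=True)
print(features.shape)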
de999808e8d4996ca1a06a2e484dac3acc0d3fe5
|
vandali/urls.py
|
vandali/urls.py
|
from django.conf.urls import patterns, url
from vandali import views
urlpatterns = patterns(
url(r'^$', views.index, name='index'),
url(r'^about/$', views.about, name='about'),
url(r'^portfolio/$', views.portfolio, name='portfolio'),
url(r'^contact/$', views.blog, name='blog'),
url(r'^blog/$', views.blog, name='blog'),
)
|
Set routes for home, about, portfolio, contact, blog
|
Set routes for home, about, portfolio, contact, blog
|
Python
|
bsd-2-clause
|
aldnav/vandali,aldnav/vandali
|
Set routes for home, about, portfolio, contact, blog
|
from django.conf.urls import patterns, url
from vandali import views
urlpatterns = patterns(
url(r'^$', views.index, name='index'),
url(r'^about/$', views.about, name='about'),
url(r'^portfolio/$', views.portfolio, name='portfolio'),
url(r'^contact/$', views.blog, name='blog'),
url(r'^blog/$', views.blog, name='blog'),
)
|
<commit_before><commit_msg>Set routes for home, about, portfolio, contact, blog<commit_after>
|
from django.conf.urls import patterns, url
from vandali import views
urlpatterns = patterns(
url(r'^$', views.index, name='index'),
url(r'^about/$', views.about, name='about'),
url(r'^portfolio/$', views.portfolio, name='portfolio'),
url(r'^contact/$', views.blog, name='blog'),
url(r'^blog/$', views.blog, name='blog'),
)
|
Set routes for home, about, portfolio, contact, blogfrom django.conf.urls import patterns, url
from vandali import views
urlpatterns = patterns(
url(r'^$', views.index, name='index'),
url(r'^about/$', views.about, name='about'),
url(r'^portfolio/$', views.portfolio, name='portfolio'),
url(r'^contact/$', views.blog, name='blog'),
url(r'^blog/$', views.blog, name='blog'),
)
|
<commit_before><commit_msg>Set routes for home, about, portfolio, contact, blog<commit_after>from django.conf.urls import patterns, url
from vandali import views
urlpatterns = patterns(
url(r'^$', views.index, name='index'),
url(r'^about/$', views.about, name='about'),
url(r'^portfolio/$', views.portfolio, name='portfolio'),
url(r'^contact/$', views.blog, name='blog'),
url(r'^blog/$', views.blog, name='blog'),
)
|
|
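Two details in this record look like slips in the original commit rather than transcription noise: Django 1.x's patterns() expects a prefix string as its first argument, and the contact route is wired to views.blog under the name 'blog'. A corrected sketch in the same idiom; views.contact is assumed to exist:

from django.conf.urls import patterns, url
from vandali import views

urlpatterns = patterns(
    '',
    url(r'^$', views.index, name='index'),
    url(r'^about/$', views.about, name='about'),
    url(r'^portfolio/$', views.portfolio, name='portfolio'),
    url(r'^contact/$', views.contact, name='contact'),
    url(r'^blog/$', views.blog, name='blog'),
)

(patterns() was removed in Django 1.10; on modern Django, urlpatterns would be a plain list of path()/re_path() entries.)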
6b8a569c4196f79b06619f392faf008fb63d0909
|
votingrecord.py
|
votingrecord.py
|
import collections
import prettytable
import yaml
import library
"""Generate governance repo voting record."""
def print_voting_record(change_ids, repo):
headers = ["Subject", "Link"]
names = collections.defaultdict(dict) # {name: {number:vote, },}
total_votes = collections.defaultdict(int) # {name: count}
changes = dict() # {number: subject, }
for details in library.get_change_details(change_ids, repo):
number = details['_number']
changes[number] = details['subject']
for user in details['labels']['Code-Review']['all']:
if user['value'] in (1, -1) and user['name'] != "Jenkins":
names[user['name']][number] = user['value']
total_votes[user['name']] += 1
# build table
x = prettytable.PrettyTable(headers + sorted(names),
hrules=prettytable.ALL)
# total votes
row = ["total votes", "N/A"]
for name in sorted(total_votes):
row.append(total_votes[name])
x.add_row(row)
for number in changes:
row = [changes[number],
"https://review.openstack.org/#/c/%s/" % number]
for name in sorted(names):
if names[name].get(number) is not None:
row.append(names[name][number])
else:
row.append(" ")
x.add_row(row)
print x.get_html_string(format=True)
def main():
config = yaml.load(open('config.yaml', 'r'))
repo = "openstack/governance"
path = config['path'] + repo
change_ids = library.get_change_ids(path, since="5.months")
change_ids = change_ids[:config['limit']]
print_voting_record(change_ids, repo)
if __name__ == '__main__':
main()
|
Add script to generate voting record for governance repo
|
Add script to generate voting record for governance repo
The OpenStack TC now votes via gerrit, so we can easily track voting
records.
|
Python
|
apache-2.0
|
jogo/gerrit-fun
|
Add script to generate voting record for governance repo
The OpenStack TC now votes via gerrit, so we can easily track voting
records.
|
import collections
import prettytable
import yaml
import library
"""Generate governance repo voting record."""
def print_voting_record(change_ids, repo):
headers = ["Subject", "Link"]
names = collections.defaultdict(dict) # {name: {number:vote, },}
total_votes = collections.defaultdict(int) # {name: count}
changes = dict() # {number: subject, }
for details in library.get_change_details(change_ids, repo):
number = details['_number']
changes[number] = details['subject']
for user in details['labels']['Code-Review']['all']:
if user['value'] in (1, -1) and user['name'] != "Jenkins":
names[user['name']][number] = user['value']
total_votes[user['name']] += 1
# build table
x = prettytable.PrettyTable(headers + sorted(names),
hrules=prettytable.ALL)
# total votes
row = ["total votes", "N/A"]
for name in sorted(total_votes):
row.append(total_votes[name])
x.add_row(row)
for number in changes:
row = [changes[number],
"https://review.openstack.org/#/c/%s/" % number]
for name in sorted(names):
if names[name].get(number) is not None:
row.append(names[name][number])
else:
row.append(" ")
x.add_row(row)
print x.get_html_string(format=True)
def main():
config = yaml.load(open('config.yaml', 'r'))
repo = "openstack/governance"
path = config['path'] + repo
change_ids = library.get_change_ids(path, since="5.months")
change_ids = change_ids[:config['limit']]
print_voting_record(change_ids, repo)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to generate voting record for governance repo
The OpenStack TC now votes via gerrit, so we can easily track voting
records.<commit_after>
|
import collections
import prettytable
import yaml
import library
"""Generate governance repo voting record."""
def print_voting_record(change_ids, repo):
headers = ["Subject", "Link"]
names = collections.defaultdict(dict) # {name: {number:vote, },}
total_votes = collections.defaultdict(int) # {name: count}
changes = dict() # {number: subject, }
for details in library.get_change_details(change_ids, repo):
number = details['_number']
changes[number] = details['subject']
for user in details['labels']['Code-Review']['all']:
if user['value'] in (1, -1) and user['name'] != "Jenkins":
names[user['name']][number] = user['value']
total_votes[user['name']] += 1
# build table
x = prettytable.PrettyTable(headers + sorted(names),
hrules=prettytable.ALL)
# total votes
row = ["total votes", "N/A"]
for name in sorted(total_votes):
row.append(total_votes[name])
x.add_row(row)
for number in changes:
row = [changes[number],
"https://review.openstack.org/#/c/%s/" % number]
for name in sorted(names):
if names[name].get(number) is not None:
row.append(names[name][number])
else:
row.append(" ")
x.add_row(row)
print x.get_html_string(format=True)
def main():
config = yaml.load(open('config.yaml', 'r'))
repo = "openstack/governance"
path = config['path'] + repo
change_ids = library.get_change_ids(path, since="5.months")
change_ids = change_ids[:config['limit']]
print_voting_record(change_ids, repo)
if __name__ == '__main__':
main()
|
Add script to generate voting record for governance repo
The OpenStack TC now votes via gerrit, so we can easily track voting
records.import collections
import prettytable
import yaml
import library
"""Generate governance repo voting record."""
def print_voting_record(change_ids, repo):
headers = ["Subject", "Link"]
names = collections.defaultdict(dict) # {name: {number:vote, },}
total_votes = collections.defaultdict(int) # {name: count}
changes = dict() # {number: subject, }
for details in library.get_change_details(change_ids, repo):
number = details['_number']
changes[number] = details['subject']
for user in details['labels']['Code-Review']['all']:
if user['value'] in (1, -1) and user['name'] != "Jenkins":
names[user['name']][number] = user['value']
total_votes[user['name']] += 1
# build table
x = prettytable.PrettyTable(headers + sorted(names),
hrules=prettytable.ALL)
# total votes
row = ["total votes", "N/A"]
for name in sorted(total_votes):
row.append(total_votes[name])
x.add_row(row)
for number in changes:
row = [changes[number],
"https://review.openstack.org/#/c/%s/" % number]
for name in sorted(names):
if names[name].get(number) is not None:
row.append(names[name][number])
else:
row.append(" ")
x.add_row(row)
print x.get_html_string(format=True)
def main():
config = yaml.load(open('config.yaml', 'r'))
repo = "openstack/governance"
path = config['path'] + repo
change_ids = library.get_change_ids(path, since="5.months")
change_ids = change_ids[:config['limit']]
print_voting_record(change_ids, repo)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to generate voting record for governance repo
The OpenStack TC now votes via gerrit, so we can easily track voting
records.<commit_after>import collections
import prettytable
import yaml
import library
"""Generate governance repo voting record."""
def print_voting_record(change_ids, repo):
headers = ["Subject", "Link"]
names = collections.defaultdict(dict) # {name: {number:vote, },}
total_votes = collections.defaultdict(int) # {name: count}
changes = dict() # {number: subject, }
for details in library.get_change_details(change_ids, repo):
number = details['_number']
changes[number] = details['subject']
for user in details['labels']['Code-Review']['all']:
if user['value'] in (1, -1) and user['name'] != "Jenkins":
names[user['name']][number] = user['value']
total_votes[user['name']] += 1
# build table
x = prettytable.PrettyTable(headers + sorted(names),
hrules=prettytable.ALL)
# total votes
row = ["total votes", "N/A"]
for name in sorted(total_votes):
row.append(total_votes[name])
x.add_row(row)
for number in changes:
row = [changes[number],
"https://review.openstack.org/#/c/%s/" % number]
for name in sorted(names):
if names[name].get(number) is not None:
row.append(names[name][number])
else:
row.append(" ")
x.add_row(row)
print x.get_html_string(format=True)
def main():
config = yaml.load(open('config.yaml', 'r'))
repo = "openstack/governance"
path = config['path'] + repo
change_ids = library.get_change_ids(path, since="5.months")
change_ids = change_ids[:config['limit']]
print_voting_record(change_ids, repo)
if __name__ == '__main__':
main()
|
|
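votingrecord.py leans on two things not shown in the record: a local library helper module exposing get_change_ids(path, since=...) and get_change_details(change_ids, repo), and a config.yaml that, judging from usage, needs at least a path (parent directory containing openstack/governance) and an integer limit. Also note the script is Python 2 (bare print), and that yaml.load without an explicit Loader is deprecated in modern PyYAML; safe_load covers a config this simple:

import yaml

cfg = yaml.safe_load('path: /home/user/src/\nlimit: 50\n')  # illustrative values
assert cfg == {'path': '/home/user/src/', 'limit': 50}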
0db77c23b81f92b16464733404b4966d7a584f46
|
sklearn/tests/test_kernel_ridge.py
|
sklearn/tests/test_kernel_ridge.py
|
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import Ridge
from sklearn.kernel_ridge import KernelRidge
from sklearn.utils.testing import assert_array_almost_equal
X, y = make_classification(n_classes=2, random_state=0)
Y = np.array([y, y]).T
def test_kernel_ridge():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed():
K = np.dot(X, X.T)
pred = KernelRidge(kernel="linear").fit(X, y).predict(X)
pred2 = KernelRidge(kernel="precomputed").fit(K, y).predict(K)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed_sample_weight():
K = np.dot(X, X.T)
K2 = K.copy()
sw = np.ones(X.shape[0]) / float(X.shape[0])
KernelRidge(kernel="precomputed").fit(K, y, sample_weight=sw)
assert_array_almost_equal(K, K2)
def test_kernel_ridge_multi_output():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, Y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, Y).predict(X)
assert_array_almost_equal(pred, pred2)
pred3 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
pred3 = np.array([pred3, pred3]).T
assert_array_almost_equal(pred2, pred3)
|
TEST Added tests for KernelRidge based on @mblondel's code in lightning
|
TEST Added tests for KernelRidge based on @mblondel's code in lightning
|
Python
|
bsd-3-clause
|
ky822/scikit-learn,IshankGulati/scikit-learn,ElDeveloper/scikit-learn,robin-lai/scikit-learn,nvoron23/scikit-learn,466152112/scikit-learn,loli/semisupervisedforests,sumspr/scikit-learn,pypot/scikit-learn,Sentient07/scikit-learn,jereze/scikit-learn,MohammedWasim/scikit-learn,kevin-intel/scikit-learn,ishanic/scikit-learn,jpautom/scikit-learn,terkkila/scikit-learn,hdmetor/scikit-learn,MatthieuBizien/scikit-learn,mojoboss/scikit-learn,theoryno3/scikit-learn,sanketloke/scikit-learn,pianomania/scikit-learn,mhdella/scikit-learn,marcocaccin/scikit-learn,shusenl/scikit-learn,Sentient07/scikit-learn,jorik041/scikit-learn,lesteve/scikit-learn,hsuantien/scikit-learn,UNR-AERIAL/scikit-learn,kashif/scikit-learn,moutai/scikit-learn,ashhher3/scikit-learn,nrhine1/scikit-learn,ky822/scikit-learn,murali-munna/scikit-learn,jm-begon/scikit-learn,jkarnows/scikit-learn,ZENGXH/scikit-learn,zaxtax/scikit-learn,pypot/scikit-learn,Myasuka/scikit-learn,Nyker510/scikit-learn,iismd17/scikit-learn,olologin/scikit-learn,davidgbe/scikit-learn,jblackburne/scikit-learn,aabadie/scikit-learn,andaag/scikit-learn,mjgrav2001/scikit-learn,vigilv/scikit-learn,pythonvietnam/scikit-learn,jjx02230808/project0223,robin-lai/scikit-learn,pv/scikit-learn,mattgiguere/scikit-learn,cwu2011/scikit-learn,mlyundin/scikit-learn,nelson-liu/scikit-learn,alexsavio/scikit-learn,Windy-Ground/scikit-learn,pkruskal/scikit-learn,mikebenfield/scikit-learn,AlexanderFabisch/scikit-learn,UNR-AERIAL/scikit-learn,mojoboss/scikit-learn,icdishb/scikit-learn,macks22/scikit-learn,glemaitre/scikit-learn,Barmaley-exe/scikit-learn,RomainBrault/scikit-learn,wzbozon/scikit-learn,zuku1985/scikit-learn,jakirkham/scikit-learn,sarahgrogan/scikit-learn,jakobworldpeace/scikit-learn,cl4rke/scikit-learn,aewhatley/scikit-learn,bhargav/scikit-learn,rvraghav93/scikit-learn,mhue/scikit-learn,etkirsch/scikit-learn,pratapvardhan/scikit-learn,russel1237/scikit-learn,ephes/scikit-learn,glouppe/scikit-learn,saiwing-yeung/scikit-learn,ssaeger/scikit-learn,walterreade/scikit-learn,ashhher3/scikit-learn,tomlof/scikit-learn,shahankhatch/scikit-learn,lenovor/scikit-learn,Srisai85/scikit-learn,fabianp/scikit-learn,eg-zhang/scikit-learn,MartinDelzant/scikit-learn,IndraVikas/scikit-learn,scikit-learn/scikit-learn,bikong2/scikit-learn,imaculate/scikit-learn,justincassidy/scikit-learn,466152112/scikit-learn,chrisburr/scikit-learn,mlyundin/scikit-learn,Akshay0724/scikit-learn,ilo10/scikit-learn,dhruv13J/scikit-learn,hlin117/scikit-learn,jmschrei/scikit-learn,Djabbz/scikit-learn,ogrisel/scikit-learn,qifeigit/scikit-learn,iismd17/scikit-learn,Nyker510/scikit-learn,dhruv13J/scikit-learn,potash/scikit-learn,shenzebang/scikit-learn,Windy-Ground/scikit-learn,frank-tancf/scikit-learn,manashmndl/scikit-learn,vibhorag/scikit-learn,nmayorov/scikit-learn,wlamond/scikit-learn,zuku1985/scikit-learn,adamgreenhall/scikit-learn,appapantula/scikit-learn,michigraber/scikit-learn,kylerbrown/scikit-learn,krez13/scikit-learn,schets/scikit-learn,nvoron23/scikit-learn,anirudhjayaraman/scikit-learn,arahuja/scikit-learn,RPGOne/scikit-learn,sumspr/scikit-learn,zorojean/scikit-learn,ishanic/scikit-learn,mehdidc/scikit-learn,jaidevd/scikit-learn,rrohan/scikit-learn,rvraghav93/scikit-learn,michigraber/scikit-learn,manashmndl/scikit-learn,Obus/scikit-learn,simon-pepin/scikit-learn,cauchycui/scikit-learn,jakirkham/scikit-learn,amueller/scikit-learn,vivekmishra1991/scikit-learn,bhargav/scikit-learn,mattilyra/scikit-learn,f3r/scikit-learn,zuku1985/scikit-learn,espg/scikit-learn,kylerbrown/scikit-learn,vibhorag/scikit-learn,hugobowne/scikit-learn,ElDeveloper/scikit-learn,nesterione/scikit-learn,ldirer/scikit-learn,smartscheduling/scikit-learn-categorical-tree,TomDLT/scikit-learn,arjoly/scikit-learn,henrykironde/scikit-learn,xubenben/scikit-learn,nikitasingh981/scikit-learn,LohithBlaze/scikit-learn,cybernet14/scikit-learn,mfjb/scikit-learn,PrashntS/scikit-learn,xzh86/scikit-learn,henrykironde/scikit-learn,yonglehou/scikit-learn,harshaneelhg/scikit-learn,imaculate/scikit-learn,Fireblend/scikit-learn,alexsavio/scikit-learn,mfjb/scikit-learn,pompiduskus/scikit-learn,CforED/Machine-Learning,rohanp/scikit-learn,sinhrks/scikit-learn,lbishal/scikit-learn,toastedcornflakes/scikit-learn,ilo10/scikit-learn,pianomania/scikit-learn,shyamalschandra/scikit-learn,yyjiang/scikit-learn,yanlend/scikit-learn,anurag313/scikit-learn,AlexandreAbraham/scikit-learn,tdhopper/scikit-learn,tmhm/scikit-learn,liangz0707/scikit-learn,untom/scikit-learn,shusenl/scikit-learn,shenzebang/scikit-learn,dingocuster/scikit-learn,kaichogami/scikit-learn,shikhardb/scikit-learn,theoryno3/scikit-learn,jayflo/scikit-learn,poryfly/scikit-learn,fabianp/scikit-learn,JsNoNo/scikit-learn,icdishb/scikit-learn,ssaeger/scikit-learn,arjoly/scikit-learn,russel1237/scikit-learn,jzt5132/scikit-learn,IssamLaradji/scikit-learn,mattgiguere/scikit-learn,theoryno3/scikit-learn,kevin-intel/scikit-learn,Vimos/scikit-learn,manhhomienbienthuy/scikit-learn,mhue/scikit-learn,IshankGulati/scikit-learn,jakobworldpeace/scikit-learn,DonBeo/scikit-learn,davidgbe/scikit-learn,moutai/scikit-learn,Myasuka/scikit-learn,clemkoa/scikit-learn,AIML/scikit-learn,lbishal/scikit-learn,toastedcornflakes/scikit-learn,YinongLong/scikit-learn,ndingwall/scikit-learn,ngoix/OCRF,voxlol/scikit-learn,ahoyosid/scikit-learn,Myasuka/scikit-learn,rohanp/scikit-learn,mikebenfield/scikit-learn,belltailjp/scikit-learn,nikitasingh981/scikit-learn,lazywei/scikit-learn,rexshihaoren/scikit-learn,etkirsch/scikit-learn,aetilley/scikit-learn,ZenDevelopmentSystems/scikit-learn,russel1237/scikit-learn,yask123/scikit-learn,harshaneelhg/scikit-learn,fyffyt/scikit-learn,MohammedWasim/scikit-learn,mjudsp/Tsallis,hlin117/scikit-learn,trungnt13/scikit-learn,sumspr/scikit-learn,NunoEdgarGub1/scikit-
learn,ogrisel/scikit-learn,sergeyf/scikit-learn,harshaneelhg/scikit-learn,arahuja/scikit-learn,yask123/scikit-learn,huzq/scikit-learn,saiwing-yeung/scikit-learn,deepesch/scikit-learn,lin-credible/scikit-learn,siutanwong/scikit-learn,andrewnc/scikit-learn,IndraVikas/scikit-learn,liyu1990/sklearn,fbagirov/scikit-learn,ilyes14/scikit-learn,anirudhjayaraman/scikit-learn,cybernet14/scikit-learn,clemkoa/scikit-learn,appapantula/scikit-learn,pompiduskus/scikit-learn,hainm/scikit-learn,vortex-ape/scikit-learn,manhhomienbienthuy/scikit-learn,chrisburr/scikit-learn,olologin/scikit-learn,shikhardb/scikit-learn,hugobowne/scikit-learn,procoder317/scikit-learn,DonBeo/scikit-learn,thientu/scikit-learn,dsquareindia/scikit-learn,imaculate/scikit-learn,aminert/scikit-learn,IssamLaradji/scikit-learn,pythonvietnam/scikit-learn,zorojean/scikit-learn,herilalaina/scikit-learn,luo66/scikit-learn,IshankGulati/scikit-learn,cauchycui/scikit-learn,mattilyra/scikit-learn,akionakamura/scikit-learn,kashif/scikit-learn,hrjn/scikit-learn,Achuth17/scikit-learn,mugizico/scikit-learn,vermouthmjl/scikit-learn,ldirer/scikit-learn,pnedunuri/scikit-learn,MartinDelzant/scikit-learn,idlead/scikit-learn,ky822/scikit-learn,nhejazi/scikit-learn,abimannans/scikit-learn,lesteve/scikit-learn,pnedunuri/scikit-learn,lazywei/scikit-learn,walterreade/scikit-learn,yask123/scikit-learn,roxyboy/scikit-learn,AlexRobson/scikit-learn,scikit-learn/scikit-learn,henridwyer/scikit-learn,DSLituiev/scikit-learn,NunoEdgarGub1/scikit-learn,fyffyt/scikit-learn,jkarnows/scikit-learn,toastedcornflakes/scikit-learn,bigdataelephants/scikit-learn,mjudsp/Tsallis,qifeigit/scikit-learn,jorge2703/scikit-learn,jmschrei/scikit-learn,beepee14/scikit-learn,spallavolu/scikit-learn,q1ang/scikit-learn,mattilyra/scikit-learn,bikong2/scikit-learn,altairpearl/scikit-learn,andrewnc/scikit-learn,tomlof/scikit-learn,Aasmi/scikit-learn,bthirion/scikit-learn,andrewnc/scikit-learn,wanggang3333/scikit-learn,jblackburne/scikit-learn,ClimbsRocks/scikit-learn,mlyundin/scikit-learn,petosegan/scikit-learn,liyu1990/sklearn,scikit-learn/scikit-learn,mxjl620/scikit-learn,jmetzen/scikit-learn,andrewnc/scikit-learn,alvarofierroclavero/scikit-learn,vivekmishra1991/scikit-learn,vermouthmjl/scikit-learn,PatrickChrist/scikit-learn,wanggang3333/scikit-learn,kjung/scikit-learn,alexeyum/scikit-learn,hainm/scikit-learn,henrykironde/scikit-learn,joernhees/scikit-learn,q1ang/scikit-learn,Barmaley-exe/scikit-learn,simon-pepin/scikit-learn,glouppe/scikit-learn,davidgbe/scikit-learn,Titan-C/scikit-learn,pnedunuri/scikit-learn,yonglehou/scikit-learn,alvarofierroclavero/scikit-learn,vibhorag/scikit-learn,gotomypc/scikit-learn,mwv/scikit-learn,ChanChiChoi/scikit-learn,nvoron23/scikit-learn,mayblue9/scikit-learn,schets/scikit-learn,IndraVikas/scikit-learn,jorik041/scikit-learn,Jimmy-Morzaria/scikit-learn,glennq/scikit-learn,heli522/scikit-learn,ngoix/OCRF,etkirsch/scikit-learn,mhue/scikit-learn,MartinSavc/scikit-learn,Djabbz/scikit-learn,mlyundin/scikit-learn,anurag313/scikit-learn,shyamalschandra/scikit-learn,yask123/scikit-learn,rajat1994/scikit-learn,MartinSavc/scikit-learn,ltiao/scikit-learn,altairpearl/scikit-learn,khkaminska/scikit-learn,vibhorag/scikit-learn,plissonf/scikit-learn,ilo10/scikit-learn,jmschrei/scikit-learn,PatrickChrist/scikit-learn,fzalkow/scikit-learn,jkarnows/scikit-learn,ilyes14/scikit-learn,Titan-C/scikit-learn,aflaxman/scikit-learn,dhruv13J/scikit-learn,xwolf12/scikit-learn,MechCoder/scikit-learn,CVML/scikit-learn,mwv/scikit-learn,yanlend/scikit-learn,ZENGXH/scikit-learn,Srisai85/s
cikit-learn,zihua/scikit-learn,xiaoxiamii/scikit-learn,AnasGhrab/scikit-learn,kaichogami/scikit-learn,idlead/scikit-learn,mayblue9/scikit-learn,fzalkow/scikit-learn,wlamond/scikit-learn,rexshihaoren/scikit-learn,mwv/scikit-learn,wanggang3333/scikit-learn,wazeerzulfikar/scikit-learn,wlamond/scikit-learn,anntzer/scikit-learn,yyjiang/scikit-learn,h2educ/scikit-learn,spallavolu/scikit-learn,nomadcube/scikit-learn,Vimos/scikit-learn,Clyde-fare/scikit-learn,liyu1990/sklearn,ahoyosid/scikit-learn,h2educ/scikit-learn,bigdataelephants/scikit-learn,ningchi/scikit-learn,RachitKansal/scikit-learn,MartinDelzant/scikit-learn,Clyde-fare/scikit-learn,JsNoNo/scikit-learn,CVML/scikit-learn,herilalaina/scikit-learn,sanketloke/scikit-learn,ahoyosid/scikit-learn,pnedunuri/scikit-learn,themrmax/scikit-learn,nesterione/scikit-learn,sinhrks/scikit-learn,jzt5132/scikit-learn,costypetrisor/scikit-learn,kashif/scikit-learn,jorge2703/scikit-learn,sarahgrogan/scikit-learn,anurag313/scikit-learn,justincassidy/scikit-learn,olologin/scikit-learn,dingocuster/scikit-learn,marcocaccin/scikit-learn,loli/semisupervisedforests,jjx02230808/project0223,rrohan/scikit-learn,saiwing-yeung/scikit-learn,cainiaocome/scikit-learn,aminert/scikit-learn,MechCoder/scikit-learn,tdhopper/scikit-learn,Fireblend/scikit-learn,gclenaghan/scikit-learn,plissonf/scikit-learn,phdowling/scikit-learn,jlegendary/scikit-learn,rahuldhote/scikit-learn,ngoix/OCRF,f3r/scikit-learn,zorroblue/scikit-learn,mxjl620/scikit-learn,Sentient07/scikit-learn,PatrickOReilly/scikit-learn,lazywei/scikit-learn,aminert/scikit-learn,ilyes14/scikit-learn,pv/scikit-learn,huobaowangxi/scikit-learn,abimannans/scikit-learn,trankmichael/scikit-learn,pratapvardhan/scikit-learn,carrillo/scikit-learn,abimannans/scikit-learn,cauchycui/scikit-learn,billy-inn/scikit-learn,equialgo/scikit-learn,tosolveit/scikit-learn,cainiaocome/scikit-learn,amueller/scikit-learn,nelson-liu/scikit-learn,aminert/scikit-learn,ephes/scikit-learn,robin-lai/scikit-learn,CforED/Machine-Learning,Adai0808/scikit-learn,samuel1208/scikit-learn,aflaxman/scikit-learn,kaichogami/scikit-learn,manashmndl/scikit-learn,jereze/scikit-learn,hrjn/scikit-learn,MechCoder/scikit-learn,JPFrancoia/scikit-learn,huzq/scikit-learn,mfjb/scikit-learn,RPGOne/scikit-learn,RayMick/scikit-learn,JsNoNo/scikit-learn,joernhees/scikit-learn,PatrickOReilly/scikit-learn,r-mart/scikit-learn,khkaminska/scikit-learn,dsquareindia/scikit-learn,abhishekkrthakur/scikit-learn,LiaoPan/scikit-learn,Obus/scikit-learn,cwu2011/scikit-learn,mxjl620/scikit-learn,smartscheduling/scikit-learn-categorical-tree,Fireblend/scikit-learn,zuku1985/scikit-learn,clemkoa/scikit-learn,Lawrence-Liu/scikit-learn,NelisVerhoef/scikit-learn,zorroblue/scikit-learn,hugobowne/scikit-learn,kjung/scikit-learn,pratapvardhan/scikit-learn,pypot/scikit-learn,xyguo/scikit-learn,hsuantien/scikit-learn,waterponey/scikit-learn,jmetzen/scikit-learn,ivannz/scikit-learn,jakirkham/scikit-learn,BiaDarkia/scikit-learn,xiaoxiamii/scikit-learn,nikitasingh981/scikit-learn,btabibian/scikit-learn,ChanChiChoi/scikit-learn,dingocuster/scikit-learn,ElDeveloper/scikit-learn,xzh86/scikit-learn,tawsifkhan/scikit-learn,IndraVikas/scikit-learn,hsuantien/scikit-learn,ltiao/scikit-learn,rajat1994/scikit-learn,MohammedWasim/scikit-learn,sergeyf/scikit-learn,mblondel/scikit-learn,ChanderG/scikit-learn,xwolf12/scikit-learn,ClimbsRocks/scikit-learn,arabenjamin/scikit-learn,Srisai85/scikit-learn,wzbozon/scikit-learn,raghavrv/scikit-learn,lesteve/scikit-learn,saiwing-yeung/scikit-learn,r-mart/scikit-learn,3manuek/s
cikit-learn,Aasmi/scikit-learn,JPFrancoia/scikit-learn,ivannz/scikit-learn,RPGOne/scikit-learn,jmetzen/scikit-learn,ZenDevelopmentSystems/scikit-learn,wzbozon/scikit-learn,AnasGhrab/scikit-learn,meduz/scikit-learn,lin-credible/scikit-learn,mjgrav2001/scikit-learn,BiaDarkia/scikit-learn,alexsavio/scikit-learn,rrohan/scikit-learn,Aasmi/scikit-learn,giorgiop/scikit-learn,RachitKansal/scikit-learn,vivekmishra1991/scikit-learn,aewhatley/scikit-learn,fzalkow/scikit-learn,hsiaoyi0504/scikit-learn,wazeerzulfikar/scikit-learn,jayflo/scikit-learn,yanlend/scikit-learn,ngoix/OCRF,jayflo/scikit-learn,zorojean/scikit-learn,RachitKansal/scikit-learn,phdowling/scikit-learn,yunfeilu/scikit-learn,meduz/scikit-learn,fengzhyuan/scikit-learn,trungnt13/scikit-learn,xyguo/scikit-learn,vinayak-mehta/scikit-learn,marcocaccin/scikit-learn,pompiduskus/scikit-learn,tawsifkhan/scikit-learn,victorbergelin/scikit-learn,betatim/scikit-learn,zaxtax/scikit-learn,zhenv5/scikit-learn,waterponey/scikit-learn,RomainBrault/scikit-learn,amueller/scikit-learn,dhruv13J/scikit-learn,amueller/scikit-learn,pompiduskus/scikit-learn,jblackburne/scikit-learn,liangz0707/scikit-learn,wazeerzulfikar/scikit-learn,sarahgrogan/scikit-learn,mattilyra/scikit-learn,YinongLong/scikit-learn,krez13/scikit-learn,spallavolu/scikit-learn,vivekmishra1991/scikit-learn,andaag/scikit-learn,bnaul/scikit-learn,pythonvietnam/scikit-learn,stylianos-kampakis/scikit-learn,nmayorov/scikit-learn,JeanKossaifi/scikit-learn,TomDLT/scikit-learn,andaag/scikit-learn,quheng/scikit-learn,RachitKansal/scikit-learn,loli/semisupervisedforests,shangwuhencc/scikit-learn,theoryno3/scikit-learn,LiaoPan/scikit-learn,roxyboy/scikit-learn,samzhang111/scikit-learn,shikhardb/scikit-learn,massmutual/scikit-learn,alvarofierroclavero/scikit-learn,deepesch/scikit-learn,robbymeals/scikit-learn,ZenDevelopmentSystems/scikit-learn,aflaxman/scikit-learn,kjung/scikit-learn,tawsifkhan/scikit-learn,voxlol/scikit-learn,rishikksh20/scikit-learn,mhue/scikit-learn,sergeyf/scikit-learn,mwv/scikit-learn,nelson-liu/scikit-learn,arahuja/scikit-learn,trankmichael/scikit-learn,zihua/scikit-learn,jakirkham/scikit-learn,CVML/scikit-learn,MechCoder/scikit-learn,henridwyer/scikit-learn,ogrisel/scikit-learn,vortex-ape/scikit-learn,MartinSavc/scikit-learn,sanketloke/scikit-learn,ldirer/scikit-learn,lenovor/scikit-learn,tawsifkhan/scikit-learn,phdowling/scikit-learn,gotomypc/scikit-learn,MatthieuBizien/scikit-learn,3manuek/scikit-learn,Achuth17/scikit-learn,anirudhjayaraman/scikit-learn,AnasGhrab/scikit-learn,xwolf12/scikit-learn,PatrickOReilly/scikit-learn,alvarofierroclavero/scikit-learn,ningchi/scikit-learn,mlyundin/scikit-learn,BiaDarkia/scikit-learn,yyjiang/scikit-learn,marcocaccin/scikit-learn,glennq/scikit-learn,quheng/scikit-learn,cl4rke/scikit-learn,Titan-C/scikit-learn,Aasmi/scikit-learn,samuel1208/scikit-learn,Vimos/scikit-learn,untom/scikit-learn,yanlend/scikit-learn,ycaihua/scikit-learn,schets/scikit-learn,poryfly/scikit-learn,robbymeals/scikit-learn,xavierwu/scikit-learn,icdishb/scikit-learn,vinayak-mehta/scikit-learn,betatim/scikit-learn,rahuldhote/scikit-learn,macks22/scikit-learn,bthirion/scikit-learn,victorbergelin/scikit-learn,michigraber/scikit-learn,tdhopper/scikit-learn,vinayak-mehta/scikit-learn,hrjn/scikit-learn,hainm/scikit-learn,khkaminska/scikit-learn,liangz0707/scikit-learn,Barmaley-exe/scikit-learn,plissonf/scikit-learn,xavierwu/scikit-learn,toastedcornflakes/scikit-learn,ilo10/scikit-learn,cybernet14/scikit-learn,andaag/scikit-learn,raghavrv/scikit-learn,Sentient07/scikit-learn,bell
tailjp/scikit-learn,aewhatley/scikit-learn,ycaihua/scikit-learn,fbagirov/scikit-learn,NelisVerhoef/scikit-learn,RomainBrault/scikit-learn,stylianos-kampakis/scikit-learn,clemkoa/scikit-learn,Barmaley-exe/scikit-learn,Windy-Ground/scikit-learn,nikitasingh981/scikit-learn,adamgreenhall/scikit-learn,jlegendary/scikit-learn,mugizico/scikit-learn,yunfeilu/scikit-learn,ndingwall/scikit-learn,dsullivan7/scikit-learn,devanshdalal/scikit-learn,mattgiguere/scikit-learn,mojoboss/scikit-learn,vermouthmjl/scikit-learn,vybstat/scikit-learn,RomainBrault/scikit-learn,raghavrv/scikit-learn,glouppe/scikit-learn,adamgreenhall/scikit-learn,zhenv5/scikit-learn,ogrisel/scikit-learn,MartinDelzant/scikit-learn,jaidevd/scikit-learn,nesterione/scikit-learn,jpautom/scikit-learn,fabianp/scikit-learn,samzhang111/scikit-learn,mjudsp/Tsallis,herilalaina/scikit-learn,nelson-liu/scikit-learn,jm-begon/scikit-learn,ycaihua/scikit-learn,nhejazi/scikit-learn,UNR-AERIAL/scikit-learn,hsiaoyi0504/scikit-learn,dsquareindia/scikit-learn,rexshihaoren/scikit-learn,anntzer/scikit-learn,eg-zhang/scikit-learn,procoder317/scikit-learn,vigilv/scikit-learn,procoder317/scikit-learn,JeanKossaifi/scikit-learn,giorgiop/scikit-learn,xyguo/scikit-learn,shenzebang/scikit-learn,DSLituiev/scikit-learn,NunoEdgarGub1/scikit-learn,zhenv5/scikit-learn,vigilv/scikit-learn,xavierwu/scikit-learn,manhhomienbienthuy/scikit-learn,victorbergelin/scikit-learn,murali-munna/scikit-learn,xuewei4d/scikit-learn,heli522/scikit-learn,voxlol/scikit-learn,yunfeilu/scikit-learn,dsullivan7/scikit-learn,ChanChiChoi/scikit-learn,chrisburr/scikit-learn,PrashntS/scikit-learn,pkruskal/scikit-learn,zaxtax/scikit-learn,lin-credible/scikit-learn,ZENGXH/scikit-learn,Lawrence-Liu/scikit-learn,robbymeals/scikit-learn,cl4rke/scikit-learn,tosolveit/scikit-learn,DonBeo/scikit-learn,thientu/scikit-learn,meduz/scikit-learn,ssaeger/scikit-learn,wlamond/scikit-learn,stylianos-kampakis/scikit-learn,r-mart/scikit-learn,appapantula/scikit-learn,henrykironde/scikit-learn,madjelan/scikit-learn,aabadie/scikit-learn,heli522/scikit-learn,nomadcube/scikit-learn,IshankGulati/scikit-learn,Myasuka/scikit-learn,pythonvietnam/scikit-learn,rahuldhote/scikit-learn,alexsavio/scikit-learn,abhishekkrthakur/scikit-learn,betatim/scikit-learn,meduz/scikit-learn,costypetrisor/scikit-learn,moutai/scikit-learn,LohithBlaze/scikit-learn,huobaowangxi/scikit-learn,nomadcube/scikit-learn,themrmax/scikit-learn,mjudsp/Tsallis,ningchi/scikit-learn,tmhm/scikit-learn,tomlof/scikit-learn,krez13/scikit-learn,pianomania/scikit-learn,pkruskal/scikit-learn,devanshdalal/scikit-learn,sergeyf/scikit-learn,f3r/scikit-learn,djgagne/scikit-learn,rohanp/scikit-learn,3manuek/scikit-learn,hsiaoyi0504/scikit-learn,OshynSong/scikit-learn,thientu/scikit-learn,arabenjamin/scikit-learn,LohithBlaze/scikit-learn,aetilley/scikit-learn,fabioticconi/scikit-learn,aabadie/scikit-learn,jjx02230808/project0223,HolgerPeters/scikit-learn,jpautom/scikit-learn,espg/scikit-learn,shusenl/scikit-learn,fzalkow/scikit-learn,OshynSong/scikit-learn,rajat1994/scikit-learn,thientu/scikit-learn,smartscheduling/scikit-learn-categorical-tree,YinongLong/scikit-learn,beepee14/scikit-learn,pv/scikit-learn,ndingwall/scikit-learn,gclenaghan/scikit-learn,ltiao/scikit-learn,mjgrav2001/scikit-learn,bhargav/scikit-learn,CforED/Machine-Learning,OshynSong/scikit-learn,quheng/scikit-learn,q1ang/scikit-learn,bnaul/scikit-learn,rexshihaoren/scikit-learn,xubenben/scikit-learn,jakobworldpeace/scikit-learn,xuewei4d/scikit-learn,PatrickOReilly/scikit-learn,hdmetor/scikit-learn,ChanCh
iChoi/scikit-learn,kjung/scikit-learn,shangwuhencc/scikit-learn,Achuth17/scikit-learn,joshloyal/scikit-learn,jseabold/scikit-learn,betatim/scikit-learn,fengzhyuan/scikit-learn,hugobowne/scikit-learn,poryfly/scikit-learn,kagayakidan/scikit-learn,zorroblue/scikit-learn,jorik041/scikit-learn,bthirion/scikit-learn,pkruskal/scikit-learn,vshtanko/scikit-learn,hainm/scikit-learn,JeanKossaifi/scikit-learn,fabioticconi/scikit-learn,sumspr/scikit-learn,quheng/scikit-learn,vigilv/scikit-learn,zihua/scikit-learn,fyffyt/scikit-learn,shangwuhencc/scikit-learn,nomadcube/scikit-learn,anntzer/scikit-learn,PrashntS/scikit-learn,lenovor/scikit-learn,carrillo/scikit-learn,cwu2011/scikit-learn,hlin117/scikit-learn,LiaoPan/scikit-learn,elkingtonmcb/scikit-learn,anurag313/scikit-learn,jlegendary/scikit-learn,xubenben/scikit-learn,bikong2/scikit-learn,BiaDarkia/scikit-learn,zihua/scikit-learn,Adai0808/scikit-learn,tosolveit/scikit-learn,hdmetor/scikit-learn,kevin-intel/scikit-learn,shusenl/scikit-learn,sonnyhu/scikit-learn,ivannz/scikit-learn,hlin117/scikit-learn,PatrickChrist/scikit-learn,jmetzen/scikit-learn,RayMick/scikit-learn,untom/scikit-learn,abhishekkrthakur/scikit-learn,devanshdalal/scikit-learn,hrjn/scikit-learn,ndingwall/scikit-learn,kaichogami/scikit-learn,bthirion/scikit-learn,scikit-learn/scikit-learn,nrhine1/scikit-learn,xyguo/scikit-learn,AnasGhrab/scikit-learn,jzt5132/scikit-learn,jpautom/scikit-learn,fengzhyuan/scikit-learn,iismd17/scikit-learn,samuel1208/scikit-learn,glouppe/scikit-learn,ChanderG/scikit-learn,mblondel/scikit-learn,olologin/scikit-learn,CforED/Machine-Learning,huzq/scikit-learn,MohammedWasim/scikit-learn,espg/scikit-learn,frank-tancf/scikit-learn,russel1237/scikit-learn,idlead/scikit-learn,Srisai85/scikit-learn,mattgiguere/scikit-learn,sinhrks/scikit-learn,terkkila/scikit-learn,dsullivan7/scikit-learn,cwu2011/scikit-learn,abimannans/scikit-learn,tmhm/scikit-learn,hsiaoyi0504/scikit-learn,kashif/scikit-learn,vortex-ape/scikit-learn,robbymeals/scikit-learn,massmutual/scikit-learn,jayflo/scikit-learn,khkaminska/scikit-learn,Titan-C/scikit-learn,elkingtonmcb/scikit-learn,tmhm/scikit-learn,moutai/scikit-learn,kylerbrown/scikit-learn,arabenjamin/scikit-learn,qifeigit/scikit-learn,zorojean/scikit-learn,untom/scikit-learn,vinayak-mehta/scikit-learn,PatrickChrist/scikit-learn,stylianos-kampakis/scikit-learn,procoder317/scikit-learn,ngoix/OCRF,LiaoPan/scikit-learn,terkkila/scikit-learn,liyu1990/sklearn,Nyker510/scikit-learn,justincassidy/scikit-learn,smartscheduling/scikit-learn-categorical-tree,jseabold/scikit-learn,potash/scikit-learn,petosegan/scikit-learn,mugizico/scikit-learn,lbishal/scikit-learn,Achuth17/scikit-learn,CVML/scikit-learn,jmschrei/scikit-learn,altairpearl/scikit-learn,yunfeilu/scikit-learn,wazeerzulfikar/scikit-learn,Jimmy-Morzaria/scikit-learn,themrmax/scikit-learn,lbishal/scikit-learn,joernhees/scikit-learn,zhenv5/scikit-learn,DSLituiev/scikit-learn,MartinSavc/scikit-learn,JosmanPS/scikit-learn,victorbergelin/scikit-learn,fbagirov/scikit-learn,davidgbe/scikit-learn,yonglehou/scikit-learn,mhdella/scikit-learn,AlexandreAbraham/scikit-learn,shahankhatch/scikit-learn,JPFrancoia/scikit-learn,UNR-AERIAL/scikit-learn,kagayakidan/scikit-learn,billy-inn/scikit-learn,billy-inn/scikit-learn,MatthieuBizien/scikit-learn,djgagne/scikit-learn,jaidevd/scikit-learn,mfjb/scikit-learn,luo66/scikit-learn,ClimbsRocks/scikit-learn,nrhine1/scikit-learn,vermouthmjl/scikit-learn,akionakamura/scikit-learn,hlin117/scikit-learn,shahankhatch/scikit-learn,schets/scikit-learn,jseabold/scikit-learn,F
ireblend/scikit-learn,xiaoxiamii/scikit-learn,arabenjamin/scikit-learn,gclenaghan/scikit-learn,madjelan/scikit-learn,frank-tancf/scikit-learn,Lawrence-Liu/scikit-learn,waterponey/scikit-learn,ephes/scikit-learn,xwolf12/scikit-learn,f3r/scikit-learn,gclenaghan/scikit-learn,imaculate/scikit-learn,HolgerPeters/scikit-learn,glennq/scikit-learn,deepesch/scikit-learn,shenzebang/scikit-learn,nhejazi/scikit-learn,fengzhyuan/scikit-learn,beepee14/scikit-learn,HolgerPeters/scikit-learn,btabibian/scikit-learn,terkkila/scikit-learn,Windy-Ground/scikit-learn,nmayorov/scikit-learn,henridwyer/scikit-learn,belltailjp/scikit-learn,dsullivan7/scikit-learn,rahuldhote/scikit-learn,mxjl620/scikit-learn,petosegan/scikit-learn,phdowling/scikit-learn,aewhatley/scikit-learn,LohithBlaze/scikit-learn,arahuja/scikit-learn,joshloyal/scikit-learn,Akshay0724/scikit-learn,samuel1208/scikit-learn,MatthieuBizien/scikit-learn,samzhang111/scikit-learn,ldirer/scikit-learn,lazywei/scikit-learn,shyamalschandra/scikit-learn,anntzer/scikit-learn,roxyboy/scikit-learn,hdmetor/scikit-learn,mayblue9/scikit-learn,sinhrks/scikit-learn,bigdataelephants/scikit-learn,macks22/scikit-learn,plissonf/scikit-learn,vybstat/scikit-learn,xiaoxiamii/scikit-learn,tdhopper/scikit-learn,mjgrav2001/scikit-learn,trungnt13/scikit-learn,siutanwong/scikit-learn,djgagne/scikit-learn,pypot/scikit-learn,ephes/scikit-learn,mblondel/scikit-learn,simon-pepin/scikit-learn,mblondel/scikit-learn,rishikksh20/scikit-learn,appapantula/scikit-learn,ahoyosid/scikit-learn,joernhees/scikit-learn,rishikksh20/scikit-learn,vshtanko/scikit-learn,cl4rke/scikit-learn,wanggang3333/scikit-learn,h2educ/scikit-learn,ltiao/scikit-learn,alexeyum/scikit-learn,pv/scikit-learn,xzh86/scikit-learn,rvraghav93/scikit-learn,jlegendary/scikit-learn,YinongLong/scikit-learn,sonnyhu/scikit-learn,macks22/scikit-learn,Djabbz/scikit-learn,RPGOne/scikit-learn,trungnt13/scikit-learn,ChanderG/scikit-learn,466152112/scikit-learn,fyffyt/scikit-learn,ycaihua/scikit-learn,rrohan/scikit-learn,billy-inn/scikit-learn,equialgo/scikit-learn,dsquareindia/scikit-learn,elkingtonmcb/scikit-learn,kagayakidan/scikit-learn,anirudhjayaraman/scikit-learn,waterponey/scikit-learn,yonglehou/scikit-learn,Vimos/scikit-learn,kevin-intel/scikit-learn,Lawrence-Liu/scikit-learn,bnaul/scikit-learn,roxyboy/scikit-learn,eg-zhang/scikit-learn,shahankhatch/scikit-learn,henridwyer/scikit-learn,rohanp/scikit-learn,poryfly/scikit-learn,vshtanko/scikit-learn,elkingtonmcb/scikit-learn,AlexRobson/scikit-learn,ilyes14/scikit-learn,kylerbrown/scikit-learn,arjoly/scikit-learn,jzt5132/scikit-learn,massmutual/scikit-learn,shyamalschandra/scikit-learn,cybernet14/scikit-learn,nhejazi/scikit-learn,justincassidy/scikit-learn,Adai0808/scikit-learn,NelisVerhoef/scikit-learn,Nyker510/scikit-learn,466152112/scikit-learn,pianomania/scikit-learn,luo66/scikit-learn,xavierwu/scikit-learn,mugizico/scikit-learn,AIML/scikit-learn,AlexanderFabisch/scikit-learn,samzhang111/scikit-learn,bigdataelephants/scikit-learn,frank-tancf/scikit-learn,fabianp/scikit-learn,lin-credible/scikit-learn,glennq/scikit-learn,chrsrds/scikit-learn,madjelan/scikit-learn,btabibian/scikit-learn,luo66/scikit-learn,raghavrv/scikit-learn,harshaneelhg/scikit-learn,JsNoNo/scikit-learn,jseabold/scikit-learn,ClimbsRocks/scikit-learn,equialgo/scikit-learn,jorge2703/scikit-learn,joshloyal/scikit-learn,akionakamura/scikit-learn,aabadie/scikit-learn,AIML/scikit-learn,gotomypc/scikit-learn,jjx02230808/project0223,AlexandreAbraham/scikit-learn,robin-lai/scikit-learn,rajat1994/scikit-learn,ZENGXH/s
cikit-learn,mhdella/scikit-learn,carrillo/scikit-learn,Obus/scikit-learn,jorge2703/scikit-learn,jereze/scikit-learn,fredhusser/scikit-learn,iismd17/scikit-learn,glemaitre/scikit-learn,alexeyum/scikit-learn,dingocuster/scikit-learn,mikebenfield/scikit-learn,tosolveit/scikit-learn,3manuek/scikit-learn,mattilyra/scikit-learn,jereze/scikit-learn,vshtanko/scikit-learn,xuewei4d/scikit-learn,RayMick/scikit-learn,jaidevd/scikit-learn,deepesch/scikit-learn,Clyde-fare/scikit-learn,nvoron23/scikit-learn,devanshdalal/scikit-learn,xzh86/scikit-learn,tomlof/scikit-learn,jkarnows/scikit-learn,voxlol/scikit-learn,NelisVerhoef/scikit-learn,Akshay0724/scikit-learn,zorroblue/scikit-learn,themrmax/scikit-learn,ElDeveloper/scikit-learn,belltailjp/scikit-learn,ZenDevelopmentSystems/scikit-learn,Jimmy-Morzaria/scikit-learn,JosmanPS/scikit-learn,icdishb/scikit-learn,spallavolu/scikit-learn,trankmichael/scikit-learn,zaxtax/scikit-learn,Jimmy-Morzaria/scikit-learn,r-mart/scikit-learn,qifeigit/scikit-learn,fredhusser/scikit-learn,fabioticconi/scikit-learn,giorgiop/scikit-learn,djgagne/scikit-learn,jm-begon/scikit-learn,lenovor/scikit-learn,jm-begon/scikit-learn,murali-munna/scikit-learn,idlead/scikit-learn,mikebenfield/scikit-learn,aetilley/scikit-learn,chrisburr/scikit-learn,murali-munna/scikit-learn,mojoboss/scikit-learn,Clyde-fare/scikit-learn,manashmndl/scikit-learn,sarahgrogan/scikit-learn,hsuantien/scikit-learn,PrashntS/scikit-learn,mehdidc/scikit-learn,AlexandreAbraham/scikit-learn,costypetrisor/scikit-learn,herilalaina/scikit-learn,manhhomienbienthuy/scikit-learn,ivannz/scikit-learn,cainiaocome/scikit-learn,pratapvardhan/scikit-learn,IssamLaradji/scikit-learn,rishikksh20/scikit-learn,nrhine1/scikit-learn,abhishekkrthakur/scikit-learn,mayblue9/scikit-learn,ChanderG/scikit-learn,huzq/scikit-learn,heli522/scikit-learn,ngoix/OCRF,ky822/scikit-learn,ashhher3/scikit-learn,giorgiop/scikit-learn,equialgo/scikit-learn,lesteve/scikit-learn,jakobworldpeace/scikit-learn,mjudsp/Tsallis,cainiaocome/scikit-learn,AlexRobson/scikit-learn,michigraber/scikit-learn,DonBeo/scikit-learn,trankmichael/scikit-learn,ishanic/scikit-learn,h2educ/scikit-learn,RayMick/scikit-learn,vybstat/scikit-learn,sonnyhu/scikit-learn,AlexanderFabisch/scikit-learn,akionakamura/scikit-learn,liangz0707/scikit-learn,JPFrancoia/scikit-learn,xubenben/scikit-learn,huobaowangxi/scikit-learn,vortex-ape/scikit-learn,ashhher3/scikit-learn,DSLituiev/scikit-learn,aflaxman/scikit-learn,ningchi/scikit-learn,carrillo/scikit-learn,beepee14/scikit-learn,siutanwong/scikit-learn,chrsrds/scikit-learn,costypetrisor/scikit-learn,vybstat/scikit-learn,JeanKossaifi/scikit-learn,Adai0808/scikit-learn,huobaowangxi/scikit-learn,kagayakidan/scikit-learn,bnaul/scikit-learn,bikong2/scikit-learn,HolgerPeters/scikit-learn,JosmanPS/scikit-learn,Akshay0724/scikit-learn
|
TEST Added tests for KernelRidge based on @mblondel's code in lightning
|
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import Ridge
from sklearn.kernel_ridge import KernelRidge
from sklearn.utils.testing import assert_array_almost_equal
X, y = make_classification(n_classes=2, random_state=0)
Y = np.array([y, y]).T
def test_kernel_ridge():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed():
K = np.dot(X, X.T)
pred = KernelRidge(kernel="linear").fit(X, y).predict(X)
pred2 = KernelRidge(kernel="precomputed").fit(K, y).predict(K)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed_sample_weight():
K = np.dot(X, X.T)
K2 = K.copy()
sw = np.ones(X.shape[0]) / float(X.shape[0])
KernelRidge(kernel="precomputed").fit(K, y, sample_weight=sw)
assert_array_almost_equal(K, K2)
def test_kernel_ridge_multi_output():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, Y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, Y).predict(X)
assert_array_almost_equal(pred, pred2)
pred3 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
pred3 = np.array([pred3, pred3]).T
assert_array_almost_equal(pred2, pred3)
|
<commit_before><commit_msg>TEST Added tests for KernelRidge based on @mblondel's code in lightning<commit_after>
|
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import Ridge
from sklearn.kernel_ridge import KernelRidge
from sklearn.utils.testing import assert_array_almost_equal
X, y = make_classification(n_classes=2, random_state=0)
Y = np.array([y, y]).T
def test_kernel_ridge():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed():
K = np.dot(X, X.T)
pred = KernelRidge(kernel="linear").fit(X, y).predict(X)
pred2 = KernelRidge(kernel="precomputed").fit(K, y).predict(K)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed_sample_weight():
K = np.dot(X, X.T)
K2 = K.copy()
sw = np.ones(X.shape[0]) / float(X.shape[0])
KernelRidge(kernel="precomputed").fit(K, y, sample_weight=sw)
assert_array_almost_equal(K, K2)
def test_kernel_ridge_multi_output():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, Y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, Y).predict(X)
assert_array_almost_equal(pred, pred2)
pred3 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
pred3 = np.array([pred3, pred3]).T
assert_array_almost_equal(pred2, pred3)
|
TEST Added tests for KernelRidge based on @mblondel's code in lightning
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import Ridge
from sklearn.kernel_ridge import KernelRidge
from sklearn.utils.testing import assert_array_almost_equal
X, y = make_classification(n_classes=2, random_state=0)
Y = np.array([y, y]).T
def test_kernel_ridge():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed():
K = np.dot(X, X.T)
pred = KernelRidge(kernel="linear").fit(X, y).predict(X)
pred2 = KernelRidge(kernel="precomputed").fit(K, y).predict(K)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed_sample_weight():
K = np.dot(X, X.T)
K2 = K.copy()
sw = np.ones(X.shape[0]) / float(X.shape[0])
KernelRidge(kernel="precomputed").fit(K, y, sample_weight=sw)
assert_array_almost_equal(K, K2)
def test_kernel_ridge_multi_output():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, Y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, Y).predict(X)
assert_array_almost_equal(pred, pred2)
pred3 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
pred3 = np.array([pred3, pred3]).T
assert_array_almost_equal(pred2, pred3)
|
<commit_before><commit_msg>TEST Added tests for KernelRidge based on @mblondel's code in lightning<commit_after>import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import Ridge
from sklearn.kernel_ridge import KernelRidge
from sklearn.utils.testing import assert_array_almost_equal
X, y = make_classification(n_classes=2, random_state=0)
Y = np.array([y, y]).T
def test_kernel_ridge():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed():
K = np.dot(X, X.T)
pred = KernelRidge(kernel="linear").fit(X, y).predict(X)
pred2 = KernelRidge(kernel="precomputed").fit(K, y).predict(K)
assert_array_almost_equal(pred, pred2)
def test_kernel_ridge_precomputed_sample_weight():
K = np.dot(X, X.T)
K2 = K.copy()
sw = np.ones(X.shape[0]) / float(X.shape[0])
KernelRidge(kernel="precomputed").fit(K, y, sample_weight=sw)
assert_array_almost_equal(K, K2)
def test_kernel_ridge_multi_output():
pred = Ridge(alpha=1, fit_intercept=False).fit(X, Y).predict(X)
pred2 = KernelRidge(kernel="linear", alpha=1).fit(X, Y).predict(X)
assert_array_almost_equal(pred, pred2)
pred3 = KernelRidge(kernel="linear", alpha=1).fit(X, y).predict(X)
pred3 = np.array([pred3, pred3]).T
assert_array_almost_equal(pred2, pred3)
|
|
7ea36b3c1f6d0cdca6b4bcb46cb765f983295b4e
|
v3/test_federation.py
|
v3/test_federation.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import httpretty
from keystoneclient.tests.v3 import utils
from keystoneclient.v3.contrib.federation import identity_providers
class IdentityProviderTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(IdentityProviderTests, self).setUp()
self.key = 'identity_provider'
self.collection_key = 'identity_providers'
self.model = identity_providers.IdentityProvider
self.manager = self.client.federation.identity_providers
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
return kwargs
def test_positional_parameters_expect_fail(self):
"""Ensure CrudManager raises TypeError exceptions.
        When the wrong number of positional arguments is passed,
        an exception should be raised.
Operations to be tested:
* create()
* get()
* list()
* delete()
* update()
"""
POS_PARAM_1 = uuid.uuid4().hex
POS_PARAM_2 = uuid.uuid4().hex
POS_PARAM_3 = uuid.uuid4().hex
PARAMETERS = {
'create': (POS_PARAM_1, POS_PARAM_2),
'get': (POS_PARAM_1, POS_PARAM_2),
'list': (POS_PARAM_1, POS_PARAM_2),
'update': (POS_PARAM_1, POS_PARAM_2, POS_PARAM_3),
'delete': (POS_PARAM_1, POS_PARAM_2)
}
for f_name, args in PARAMETERS.items():
self.assertRaises(TypeError, getattr(self.manager, f_name),
*args)
@httpretty.activate
def test_create(self, ref=None, req_ref=None):
ref = ref or self.new_ref()
# req_ref argument allows you to specify a different
# signature for the request when the manager does some
        # conversion before doing the request (e.g. converting
        # from a datetime object to a timestamp string)
req_ref = (req_ref or ref).copy()
req_ref.pop('id')
self.stub_entity(httpretty.PUT, entity=ref, id=ref['id'], status=201)
returned = self.manager.create(**ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
|
Add CRUD operations for Identity Providers.
|
Add CRUD operations for Identity Providers.
Add relevant methods for adding, updating, listing, getting, and deleting
Identity Provider objects.
Change-Id: Ib77781b507d2c06c368a1877eb716ec7fe2d88e4
Implements: blueprint federation-crud-operations
|
Python
|
apache-2.0
|
varunarya10/keystonemiddleware
|
Add CRUD operations for Identity Providers.
Add relevant methods for adding, updating, listing, getting, and deleting
Identity Provider objects.
Change-Id: Ib77781b507d2c06c368a1877eb716ec7fe2d88e4
Implements: blueprint federation-crud-operations
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import httpretty
from keystoneclient.tests.v3 import utils
from keystoneclient.v3.contrib.federation import identity_providers
class IdentityProviderTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(IdentityProviderTests, self).setUp()
self.key = 'identity_provider'
self.collection_key = 'identity_providers'
self.model = identity_providers.IdentityProvider
self.manager = self.client.federation.identity_providers
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
return kwargs
def test_positional_parameters_expect_fail(self):
"""Ensure CrudManager raises TypeError exceptions.
        When the wrong number of positional arguments is passed,
        an exception should be raised.
Operations to be tested:
* create()
* get()
* list()
* delete()
* update()
"""
POS_PARAM_1 = uuid.uuid4().hex
POS_PARAM_2 = uuid.uuid4().hex
POS_PARAM_3 = uuid.uuid4().hex
PARAMETERS = {
'create': (POS_PARAM_1, POS_PARAM_2),
'get': (POS_PARAM_1, POS_PARAM_2),
'list': (POS_PARAM_1, POS_PARAM_2),
'update': (POS_PARAM_1, POS_PARAM_2, POS_PARAM_3),
'delete': (POS_PARAM_1, POS_PARAM_2)
}
for f_name, args in PARAMETERS.items():
self.assertRaises(TypeError, getattr(self.manager, f_name),
*args)
@httpretty.activate
def test_create(self, ref=None, req_ref=None):
ref = ref or self.new_ref()
# req_ref argument allows you to specify a different
# signature for the request when the manager does some
        # conversion before doing the request (e.g. converting
        # from a datetime object to a timestamp string)
req_ref = (req_ref or ref).copy()
req_ref.pop('id')
self.stub_entity(httpretty.PUT, entity=ref, id=ref['id'], status=201)
returned = self.manager.create(**ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
|
<commit_before><commit_msg>Add CRUD operations for Identity Providers.
Add relevant methods for adding, updating, listing, getting, and deleting
Identity Provider objects.
Change-Id: Ib77781b507d2c06c368a1877eb716ec7fe2d88e4
Implements: blueprint federation-crud-operations<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import httpretty
from keystoneclient.tests.v3 import utils
from keystoneclient.v3.contrib.federation import identity_providers
class IdentityProviderTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(IdentityProviderTests, self).setUp()
self.key = 'identity_provider'
self.collection_key = 'identity_providers'
self.model = identity_providers.IdentityProvider
self.manager = self.client.federation.identity_providers
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
return kwargs
def test_positional_parameters_expect_fail(self):
"""Ensure CrudManager raises TypeError exceptions.
        When the wrong number of positional arguments is passed,
        an exception should be raised.
Operations to be tested:
* create()
* get()
* list()
* delete()
* update()
"""
POS_PARAM_1 = uuid.uuid4().hex
POS_PARAM_2 = uuid.uuid4().hex
POS_PARAM_3 = uuid.uuid4().hex
PARAMETERS = {
'create': (POS_PARAM_1, POS_PARAM_2),
'get': (POS_PARAM_1, POS_PARAM_2),
'list': (POS_PARAM_1, POS_PARAM_2),
'update': (POS_PARAM_1, POS_PARAM_2, POS_PARAM_3),
'delete': (POS_PARAM_1, POS_PARAM_2)
}
for f_name, args in PARAMETERS.items():
self.assertRaises(TypeError, getattr(self.manager, f_name),
*args)
@httpretty.activate
def test_create(self, ref=None, req_ref=None):
ref = ref or self.new_ref()
# req_ref argument allows you to specify a different
# signature for the request when the manager does some
        # conversion before doing the request (e.g. converting
        # from a datetime object to a timestamp string)
req_ref = (req_ref or ref).copy()
req_ref.pop('id')
self.stub_entity(httpretty.PUT, entity=ref, id=ref['id'], status=201)
returned = self.manager.create(**ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
|
Add CRUD operations for Identity Providers.
Add relevant methods for adding, updating, listing, getting, and deleting
Identity Provider objects.
Change-Id: Ib77781b507d2c06c368a1877eb716ec7fe2d88e4
Implements: blueprint federation-crud-operations
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import httpretty
from keystoneclient.tests.v3 import utils
from keystoneclient.v3.contrib.federation import identity_providers
class IdentityProviderTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(IdentityProviderTests, self).setUp()
self.key = 'identity_provider'
self.collection_key = 'identity_providers'
self.model = identity_providers.IdentityProvider
self.manager = self.client.federation.identity_providers
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
return kwargs
def test_positional_parameters_expect_fail(self):
"""Ensure CrudManager raises TypeError exceptions.
        When the wrong number of positional arguments is passed,
        an exception should be raised.
Operations to be tested:
* create()
* get()
* list()
* delete()
* update()
"""
POS_PARAM_1 = uuid.uuid4().hex
POS_PARAM_2 = uuid.uuid4().hex
POS_PARAM_3 = uuid.uuid4().hex
PARAMETERS = {
'create': (POS_PARAM_1, POS_PARAM_2),
'get': (POS_PARAM_1, POS_PARAM_2),
'list': (POS_PARAM_1, POS_PARAM_2),
'update': (POS_PARAM_1, POS_PARAM_2, POS_PARAM_3),
'delete': (POS_PARAM_1, POS_PARAM_2)
}
for f_name, args in PARAMETERS.items():
self.assertRaises(TypeError, getattr(self.manager, f_name),
*args)
@httpretty.activate
def test_create(self, ref=None, req_ref=None):
ref = ref or self.new_ref()
# req_ref argument allows you to specify a different
# signature for the request when the manager does some
        # conversion before doing the request (e.g. converting
        # from a datetime object to a timestamp string)
req_ref = (req_ref or ref).copy()
req_ref.pop('id')
self.stub_entity(httpretty.PUT, entity=ref, id=ref['id'], status=201)
returned = self.manager.create(**ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
|
<commit_before><commit_msg>Add CRUD operations for Identity Providers.
Add relevant methods for adding, updating, listing, getting, and deleting
Identity Provider objects.
Change-Id: Ib77781b507d2c06c368a1877eb716ec7fe2d88e4
Implements: blueprint federation-crud-operations<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import httpretty
from keystoneclient.tests.v3 import utils
from keystoneclient.v3.contrib.federation import identity_providers
class IdentityProviderTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(IdentityProviderTests, self).setUp()
self.key = 'identity_provider'
self.collection_key = 'identity_providers'
self.model = identity_providers.IdentityProvider
self.manager = self.client.federation.identity_providers
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
return kwargs
def test_positional_parameters_expect_fail(self):
"""Ensure CrudManager raises TypeError exceptions.
        When the wrong number of positional arguments is passed,
        an exception should be raised.
Operations to be tested:
* create()
* get()
* list()
* delete()
* update()
"""
POS_PARAM_1 = uuid.uuid4().hex
POS_PARAM_2 = uuid.uuid4().hex
POS_PARAM_3 = uuid.uuid4().hex
PARAMETERS = {
'create': (POS_PARAM_1, POS_PARAM_2),
'get': (POS_PARAM_1, POS_PARAM_2),
'list': (POS_PARAM_1, POS_PARAM_2),
'update': (POS_PARAM_1, POS_PARAM_2, POS_PARAM_3),
'delete': (POS_PARAM_1, POS_PARAM_2)
}
for f_name, args in PARAMETERS.items():
self.assertRaises(TypeError, getattr(self.manager, f_name),
*args)
@httpretty.activate
def test_create(self, ref=None, req_ref=None):
ref = ref or self.new_ref()
# req_ref argument allows you to specify a different
# signature for the request when the manager does some
        # conversion before doing the request (e.g. converting
        # from a datetime object to a timestamp string)
req_ref = (req_ref or ref).copy()
req_ref.pop('id')
self.stub_entity(httpretty.PUT, entity=ref, id=ref['id'], status=201)
returned = self.manager.create(**ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
|
|
4d2f90d0671c5425fde6da3165c19340a39a4942
|
test/test_report_mapping.py
|
test/test_report_mapping.py
|
# Import -----------------------------------------------------------------------
import os
from sequana.report_mapping import MappingReport
from sequana import bedtools
from . import data
pathdata = data.__path__[0]
# Test -------------------------------------------------------------------------
def test_report():
mydata = bedtools.genomecov(pathdata + os.sep + "test.bed")
mydata.running_median(n=3, circular=False)
mydata.coverage_scaling()
mydata.compute_zscore()
r = MappingReport()
r.set_data(mydata)
r.create_report()
|
Add test for report mapping
|
Add test for report mapping
|
Python
|
bsd-3-clause
|
sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana
|
Add test for report mapping
|
# Import -----------------------------------------------------------------------
import os
from sequana.report_mapping import MappingReport
from sequana import bedtools
from . import data
pathdata = data.__path__[0]
# Test -------------------------------------------------------------------------
def test_report():
mydata = bedtools.genomecov(pathdata + os.sep + "test.bed")
mydata.running_median(n=3, circular=False)
mydata.coverage_scaling()
mydata.compute_zscore()
r = MappingReport()
r.set_data(mydata)
r.create_report()
|
<commit_before><commit_msg>Add test for report mapping<commit_after>
|
# Import -----------------------------------------------------------------------
import os
from sequana.report_mapping import MappingReport
from sequana import bedtools
from . import data
pathdata = data.__path__[0]
# Test -------------------------------------------------------------------------
def test_report():
mydata = bedtools.genomecov(pathdata + os.sep + "test.bed")
mydata.running_median(n=3, circular=False)
mydata.coverage_scaling()
mydata.compute_zscore()
r = MappingReport()
r.set_data(mydata)
r.create_report()
|
Add test for report mapping
# Import -----------------------------------------------------------------------
import os
from sequana.report_mapping import MappingReport
from sequana import bedtools
from . import data
pathdata = data.__path__[0]
# Test -------------------------------------------------------------------------
def test_report():
mydata = bedtools.genomecov(pathdata + os.sep + "test.bed")
mydata.running_median(n=3, circular=False)
mydata.coverage_scaling()
mydata.compute_zscore()
r = MappingReport()
r.set_data(mydata)
r.create_report()
|
<commit_before><commit_msg>Add test for report mapping<commit_after># Import -----------------------------------------------------------------------
import os
from sequana.report_mapping import MappingReport
from sequana import bedtools
from . import data
pathdata = data.__path__[0]
# Test -------------------------------------------------------------------------
def test_report():
mydata = bedtools.genomecov(pathdata + os.sep + "test.bed")
mydata.running_median(n=3, circular=False)
mydata.coverage_scaling()
mydata.compute_zscore()
r = MappingReport()
r.set_data(mydata)
r.create_report()
|
|
cce53a80c34978b9dec96cb77897970ff9cb9d65
|
dataactcore/migrations/versions/d35ecdfc1da7_add_indexing_to_duns_table.py
|
dataactcore/migrations/versions/d35ecdfc1da7_add_indexing_to_duns_table.py
|
"""add indexing to DUNS table
Revision ID: d35ecdfc1da7
Revises: 4b1ee78268fb
Create Date: 2017-08-31 12:03:16.395760
"""
# revision identifiers, used by Alembic.
revision = 'd35ecdfc1da7'
down_revision = '4b1ee78268fb'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.create_index(op.f('ix_duns_activation_date'), 'duns', ['activation_date'], unique=False)
op.create_index(op.f('ix_duns_deactivation_date'), 'duns', ['deactivation_date'], unique=False)
op.create_index(op.f('ix_duns_expiration_date'), 'duns', ['expiration_date'], unique=False)
### end Alembic commands ###
def downgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_duns_expiration_date'), table_name='duns')
op.drop_index(op.f('ix_duns_deactivation_date'), table_name='duns')
op.drop_index(op.f('ix_duns_activation_date'), table_name='duns')
### end Alembic commands ###
|
Add migration for DUNS indexing
|
Add migration for DUNS indexing
|
Python
|
cc0-1.0
|
fedspendingtransparency/data-act-broker-backend,fedspendingtransparency/data-act-broker-backend
|
Add migration for DUNS indexing
|
"""add indexing to DUNS table
Revision ID: d35ecdfc1da7
Revises: 4b1ee78268fb
Create Date: 2017-08-31 12:03:16.395760
"""
# revision identifiers, used by Alembic.
revision = 'd35ecdfc1da7'
down_revision = '4b1ee78268fb'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.create_index(op.f('ix_duns_activation_date'), 'duns', ['activation_date'], unique=False)
op.create_index(op.f('ix_duns_deactivation_date'), 'duns', ['deactivation_date'], unique=False)
op.create_index(op.f('ix_duns_expiration_date'), 'duns', ['expiration_date'], unique=False)
### end Alembic commands ###
def downgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_duns_expiration_date'), table_name='duns')
op.drop_index(op.f('ix_duns_deactivation_date'), table_name='duns')
op.drop_index(op.f('ix_duns_activation_date'), table_name='duns')
### end Alembic commands ###
|
<commit_before><commit_msg>Add migration for DUNS indexing<commit_after>
|
"""add indexing to DUNS table
Revision ID: d35ecdfc1da7
Revises: 4b1ee78268fb
Create Date: 2017-08-31 12:03:16.395760
"""
# revision identifiers, used by Alembic.
revision = 'd35ecdfc1da7'
down_revision = '4b1ee78268fb'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.create_index(op.f('ix_duns_activation_date'), 'duns', ['activation_date'], unique=False)
op.create_index(op.f('ix_duns_deactivation_date'), 'duns', ['deactivation_date'], unique=False)
op.create_index(op.f('ix_duns_expiration_date'), 'duns', ['expiration_date'], unique=False)
### end Alembic commands ###
def downgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_duns_expiration_date'), table_name='duns')
op.drop_index(op.f('ix_duns_deactivation_date'), table_name='duns')
op.drop_index(op.f('ix_duns_activation_date'), table_name='duns')
### end Alembic commands ###
|
Add migration for DUNS indexing"""add indexing to DUNS table
Revision ID: d35ecdfc1da7
Revises: 4b1ee78268fb
Create Date: 2017-08-31 12:03:16.395760
"""
# revision identifiers, used by Alembic.
revision = 'd35ecdfc1da7'
down_revision = '4b1ee78268fb'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.create_index(op.f('ix_duns_activation_date'), 'duns', ['activation_date'], unique=False)
op.create_index(op.f('ix_duns_deactivation_date'), 'duns', ['deactivation_date'], unique=False)
op.create_index(op.f('ix_duns_expiration_date'), 'duns', ['expiration_date'], unique=False)
### end Alembic commands ###
def downgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_duns_expiration_date'), table_name='duns')
op.drop_index(op.f('ix_duns_deactivation_date'), table_name='duns')
op.drop_index(op.f('ix_duns_activation_date'), table_name='duns')
### end Alembic commands ###
|
<commit_before><commit_msg>Add migration for DUNS indexing<commit_after>"""add indexing to DUNS table
Revision ID: d35ecdfc1da7
Revises: 4b1ee78268fb
Create Date: 2017-08-31 12:03:16.395760
"""
# revision identifiers, used by Alembic.
revision = 'd35ecdfc1da7'
down_revision = '4b1ee78268fb'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.create_index(op.f('ix_duns_activation_date'), 'duns', ['activation_date'], unique=False)
op.create_index(op.f('ix_duns_deactivation_date'), 'duns', ['deactivation_date'], unique=False)
op.create_index(op.f('ix_duns_expiration_date'), 'duns', ['expiration_date'], unique=False)
### end Alembic commands ###
def downgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_duns_expiration_date'), table_name='duns')
op.drop_index(op.f('ix_duns_deactivation_date'), table_name='duns')
op.drop_index(op.f('ix_duns_activation_date'), table_name='duns')
### end Alembic commands ###
|
|
6b27d719afcaa2beba35c012e365c95bf0e5fbd2
|
migrations/versions/6bd350cf4748_add_lab_assistant_role.py
|
migrations/versions/6bd350cf4748_add_lab_assistant_role.py
|
"""Add Lab assistant role
Revision ID: 6bd350cf4748
Revises: 6504bfe5203c
Create Date: 2016-08-28 17:43:34.671050
"""
# revision identifiers, used by Alembic.
revision = '6bd350cf4748'
down_revision = '6504bfe5203c'
from alembic import op
import sqlalchemy as sa
old = ['student', 'grader', 'staff', 'instructor']
new = ['student', 'lab assistant', 'grader', 'staff', 'instructor']
def upgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*old, name='role'),
type_=sa.types.Enum(*new, name='role'))
def downgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*new, name='role'),
type_=sa.types.Enum(*old, name='role'))
|
Add migration for role enum
|
Add migration for role enum
|
Python
|
apache-2.0
|
Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok
|
Add migration for role enum
|
"""Add Lab assistant role
Revision ID: 6bd350cf4748
Revises: 6504bfe5203c
Create Date: 2016-08-28 17:43:34.671050
"""
# revision identifiers, used by Alembic.
revision = '6bd350cf4748'
down_revision = '6504bfe5203c'
from alembic import op
import sqlalchemy as sa
old = ['student', 'grader', 'staff', 'instructor']
new = ['student', 'lab assistant', 'grader', 'staff', 'instructor']
def upgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*old, name='role'),
type_=sa.types.Enum(*new, name='role'))
def downgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*new, name='role'),
type_=sa.types.Enum(*old, name='role'))
|
<commit_before><commit_msg>Add migration for role enum<commit_after>
|
"""Add Lab assistant role
Revision ID: 6bd350cf4748
Revises: 6504bfe5203c
Create Date: 2016-08-28 17:43:34.671050
"""
# revision identifiers, used by Alembic.
revision = '6bd350cf4748'
down_revision = '6504bfe5203c'
from alembic import op
import sqlalchemy as sa
old = ['student', 'grader', 'staff', 'instructor']
new = ['student', 'lab assistant', 'grader', 'staff', 'instructor']
def upgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*old, name='role'),
type_=sa.types.Enum(*new, name='role'))
def downgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*new, name='role'),
type_=sa.types.Enum(*old, name='role'))
|
Add migration for role enum
"""Add Lab assistant role
Revision ID: 6bd350cf4748
Revises: 6504bfe5203c
Create Date: 2016-08-28 17:43:34.671050
"""
# revision identifiers, used by Alembic.
revision = '6bd350cf4748'
down_revision = '6504bfe5203c'
from alembic import op
import sqlalchemy as sa
old = ['student', 'grader', 'staff', 'instructor']
new = ['student', 'lab assistant', 'grader', 'staff', 'instructor']
def upgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*old, name='role'),
type_=sa.types.Enum(*new, name='role'))
def downgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*new, name='role'),
type_=sa.types.Enum(*old, name='role'))
|
<commit_before><commit_msg>Add migration for role enum<commit_after>"""Add Lab assistant role
Revision ID: 6bd350cf4748
Revises: 6504bfe5203c
Create Date: 2016-08-28 17:43:34.671050
"""
# revision identifiers, used by Alembic.
revision = '6bd350cf4748'
down_revision = '6504bfe5203c'
from alembic import op
import sqlalchemy as sa
old = ['student', 'grader', 'staff', 'instructor']
new = ['student', 'lab assistant', 'grader', 'staff', 'instructor']
def upgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*old, name='role'),
type_=sa.types.Enum(*new, name='role'))
def downgrade():
op.alter_column("enrollment", "role", existing_type=sa.types.Enum(*new, name='role'),
type_=sa.types.Enum(*old, name='role'))
|
|
e4f0fc2cdd209bbadffae9f3da83b0585a64143f
|
accelerator/migrations/0077_add_program_overview_link_field_to_a_program.py
|
accelerator/migrations/0077_add_program_overview_link_field_to_a_program.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-05-15 14:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0076_change_description_to_textfield'),
]
operations = [
migrations.AddField(
model_name='program',
name='program_overview_link',
field=models.URLField(
blank=True,
null=True,
help_text=('URL of the program overview page, '
'ex: https://masschallenge.org/programs-boston'),
max_length=255,
)
)
]
|
Add migration file for program overview link
|
[AC-6989] Add migration file for program overview link
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
[AC-6989] Add migration file for program overview link
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-05-15 14:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0076_change_description_to_textfield'),
]
operations = [
migrations.AddField(
model_name='program',
name='program_overview_link',
field=models.URLField(
blank=True,
null=True,
help_text=('URL of the program overview page, '
'ex: https://masschallenge.org/programs-boston'),
max_length=255,
)
)
]
|
<commit_before><commit_msg>[AC-6989] Add migration file for program overview link<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-05-15 14:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0076_change_description_to_textfield'),
]
operations = [
migrations.AddField(
model_name='program',
name='program_overview_link',
field=models.URLField(
blank=True,
null=True,
help_text=('URL of the program overview page, '
'ex: https://masschallenge.org/programs-boston'),
max_length=255,
)
)
]
|
[AC-6989] Add migration file for program overview link
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-05-15 14:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0076_change_description_to_textfield'),
]
operations = [
migrations.AddField(
model_name='program',
name='program_overview_link',
field=models.URLField(
blank=True,
null=True,
help_text=('URL of the program overview page, '
'ex: https://masschallenge.org/programs-boston'),
max_length=255,
)
)
]
|
<commit_before><commit_msg>[AC-6989] Add migration file for program overview link<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-05-15 14:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0076_change_description_to_textfield'),
]
operations = [
migrations.AddField(
model_name='program',
name='program_overview_link',
field=models.URLField(
blank=True,
null=True,
help_text=('URL of the program overview page, '
'ex: https://masschallenge.org/programs-boston'),
max_length=255,
)
)
]
|