| commit (string, 40 chars) | old_file (string, 4–118 chars) | new_file (string, 4–118 chars) | old_contents (string, 0–2.94k chars) | new_contents (string, 1–4.43k chars) | subject (string, 15–444 chars) | message (string, 16–3.45k chars) | lang (1 class) | license (13 classes) | repos (string, 5–43.2k chars) |
|---|---|---|---|---|---|---|---|---|---|
7eed63eb800804d74ccf63e26037495f8858fc67
|
migrations/versions/0354_government_channel.py
|
migrations/versions/0354_government_channel.py
|
"""
Revision ID: 0354_government_channel
Revises: 0353_broadcast_provider_not_null
Create Date: 2021-05-11 16:17:12.479191
"""
from alembic import op
revision = '0354_government_channel'
down_revision = '0353_broadcast_provider_not_null'
def upgrade():
op.execute("INSERT INTO broadcast_channel_types VALUES ('government')")
def downgrade():
# This can't be downgraded if there are rows in service_broadcast_settings which
# have the channel set to government or if broadcasts have already been sent on the
# government channel - it would break foreign key constraints.
op.execute("DELETE FROM broadcast_channel_types WHERE name = 'government'")
|
Add 'government' to broadcast_channel_types table
|
Add 'government' to broadcast_channel_types table
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
37222a8ad9c5a9b5bf1753ce35dfce28caba0a5a
|
py/reverse-words-in-a-string-iii.py
|
py/reverse-words-in-a-string-iii.py
|
class Solution(object):
    def reverseWords(self, s):
        """
        :type s: str
        :rtype: str
        """
        return ' '.join([x[::-1] for x in s.split(' ')])
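For reference, a quick sanity check using LeetCode's sample input for problem 557:

# Each word is reversed in place while word order and spacing are preserved.
assert Solution().reverseWords("Let's take LeetCode contest") == "s'teL ekat edoCteeL tsetnoc"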
|
Add py solution for 557. Reverse Words in a String III
|
Add py solution for 557. Reverse Words in a String III
557. Reverse Words in a String III: https://leetcode.com/problems/reverse-words-in-a-string-iii/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
12dadc417173d1e70a2f4d9b6817b505fd47679b
|
test/test_network_service.py
|
test/test_network_service.py
|
import uuid

from test.helpers import ProviderTestBase
import test.helpers as helpers


class CloudNetworkServiceTestCase(ProviderTestBase):

    def __init__(self, methodName, provider):
        super(CloudNetworkServiceTestCase, self).__init__(
            methodName=methodName, provider=provider)

    def test_crud_network_service(self):
        name = 'cbtestnetwork-{0}'.format(uuid.uuid4())
        subnet_name = 'cbtestsubnet-{0}'.format(uuid.uuid4())
        net = self.provider.network.create(name=name)
        with helpers.cleanup_action(
                lambda:
                self.provider.network.delete(network_id=net.id)
        ):
            # test list method
            netl = self.provider.network.list()
            list_netl = [n for n in netl if n.name == name]
            self.assertTrue(
                len(list_netl) == 1,
                "List networks does not return the expected network %s" %
                name)

            # check get
            get_net = self.provider.network.get(network_id=net.id)
            self.assertTrue(
                get_net == net,
                "Get network did not return the expected network {0}."
                .format(name))

            # check subnet
            subnet = self.provider.network.create_subnet(
                network=net, cidr_block="10.0.0.1/24", name=subnet_name)
            with helpers.cleanup_action(
                    lambda:
                    self.provider.network.delete_subnet(subnet=subnet)
            ):
                # test list method
                subnetl = self.provider.network.list_subnets()
                list_subnetl = [n for n in subnetl if n.name == subnet_name]
                self.assertTrue(
                    len(list_subnetl) == 1,
                    "List subnets does not return the expected subnet %s" %
                    subnet_name)

            # the subnet should be gone once its cleanup action has run
            subnetl = self.provider.network.list_subnets()
            found_subnet = [n for n in subnetl if n.name == subnet_name]
            self.assertTrue(
                len(found_subnet) == 0,
                "Subnet {0} should have been deleted but still exists."
                .format(subnet_name))

        # the network should be gone once its cleanup action has run
        netl = self.provider.network.list()
        found_net = [n for n in netl if n.name == name]
        self.assertTrue(
            len(found_net) == 0,
            "Network {0} should have been deleted but still exists."
            .format(name))
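The test leans on test.helpers.cleanup_action, which is not included in this record; a minimal sketch of what such a helper presumably looks like:

# Hypothetical stand-in for test.helpers.cleanup_action: run the wrapped
# block, then always run the given cleanup callable, even on failure.
from contextlib import contextmanager

@contextmanager
def cleanup_action(cleanup_fn):
    try:
        yield
    finally:
        cleanup_fn()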
|
Add tests for Network service
|
Add tests for Network service
|
Python
|
mit
|
ms-azure-cloudbroker/cloudbridge,gvlproject/libcloudbridge,gvlproject/cloudbridge
|
ab9195f85c4cc4b06a41f616be37dba546579abb
|
results/table-helmholtz-reductions.py
|
results/table-helmholtz-reductions.py
|
import os
import sys

import pandas as pd

p4_data = "helmholtz-results/helmholtz_conv-d-4.csv"
p5_data = "helmholtz-results/helmholtz_conv-d-5.csv"
p6_data = "helmholtz-results/helmholtz_conv-d-6.csv"
p7_data = "helmholtz-results/helmholtz_conv-d-7.csv"
data_set = [p4_data, p5_data, p6_data, p7_data]

for data in data_set:
    if not os.path.exists(data):
        print("Cannot find data file '%s'" % data)
        sys.exit(1)

dfs = pd.concat(pd.read_csv(data) for data in data_set)
groups = dfs.groupby(["Degree"], as_index=False)

table = r"""Degree & Reductions (avg.): $\frac{\norm{b - A x^*}_2}{\norm{b - A x^0}_2}$\\
\hline
"""
lformat = r"""{degree: d} & {reductions:.4g}\\
"""

for group in groups:
    degree, df = group
    table += lformat.format(degree=degree,
                            reductions=df["ResidualReductions"].mean())
print(table)
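Illustrative only: the shape of one emitted LaTeX row, using a placeholder reduction value rather than a measured result:

# Placeholder numbers, not real results; shows what lformat produces.
print(lformat.format(degree=4, reductions=0.001234))  # ->  4 & 0.001234\\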
|
Add script for helmholtz residual reductions table
|
Add script for helmholtz residual reductions table
|
Python
|
mit
|
thomasgibson/tabula-rasa
|
a191377b3490f0f3b14e1cbddda990fa7a73a6e0
|
tools/convert_v1_net_data.py
|
tools/convert_v1_net_data.py
|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import argparse
import os

import yaml


def parse_opts():
    """Parse all of the conversion options."""
    parser = argparse.ArgumentParser(
        description="Convert a network V1 template to a V2 template."
    )
    parser.add_argument(
        "v1", metavar="<network_data.yaml>", help="Existing V1 Template."
    )
    return parser.parse_args()


def main():
    """Convert a network v1 template to the network v2 format.

    The V1 template will be converted to V2 format. The V1 template will be
    saved as a backup file before writing the V2 net-data format.
    """
    args = parse_opts()
    net_data_file = os.path.abspath(os.path.expanduser(args.v1))
    with open(net_data_file) as f:
        template_data = yaml.safe_load(f)

    new_template_data = list()
    for item in template_data:
        new_item = dict()
        item.pop("enabled", False)  # Drop unused var
        name = new_item["name"] = item.pop("name")
        name_lower = new_item["name_lower"] = item.pop(
            "name_lower", name.lower()
        )
        new_item["vip"] = item.pop("vip", False)
        new_item["mtu"] = item.pop("mtu", 1500)
        new_item["subnets"] = item.pop("subnets", dict())
        new_item["subnets"]["{}_subnet".format(name_lower)] = item
        new_template_data.append(new_item)

    os.rename(net_data_file, "{}.bak".format(net_data_file))
    try:
        # content is converted to yaml before opening the file.
        # This is done to ensure that we're not breaking any existing files
        # during the conversion process.
        dump_yaml = yaml.safe_dump(
            new_template_data, default_style=False, sort_keys=False
        )
    except Exception as e:
        print("Conversion could not be completed. Error:{}".format(str(e)))
    else:
        with open(net_data_file, "w") as f:
            f.write(dump_yaml)


if __name__ == "__main__":
    main()
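Illustrative input/output for the conversion loop above; the field values are made-up placeholders in the documented v1 shape, not taken from a real deployment:

# A hypothetical v1 network entry...
v1_item = {"name": "Storage", "enabled": True, "vip": True, "mtu": 1500,
           "ip_subnet": "192.168.24.0/24"}
# ...would be rewritten by the loop above as:
# {"name": "Storage", "name_lower": "storage", "vip": True, "mtu": 1500,
#  "subnets": {"storage_subnet": {"ip_subnet": "192.168.24.0/24"}}}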
|
Add tool to convert net-data from v1 to v2
|
Add tool to convert net-data from v1 to v2
This new tool will convert the old v1 net-data format to v2. This tool
will create a backup of the older net-data file so that its possible to
recover data should anything go wrong in the conversion. Upon completion
the tool will create the v2 file using the existing file format name.
Change-Id: I4b4d79b0f68287921fedb3ad7b29b50df5ae3f02
Signed-off-by: Kevin Carter <1d9a750a4fc7026fb02029431dc8a87d24487135@redhat.com>
|
Python
|
apache-2.0
|
openstack/tripleo-heat-templates,openstack/tripleo-heat-templates
|
7f60ca7163c2448b363b349ee4fbcc6abf19336c
|
utilities/update_language.py
|
utilities/update_language.py
|
# utilities.update_language
# This language utility updates an existing Bible data file for the given
# language (using existing data, such as default version, to recreate the data
# file)

from __future__ import unicode_literals

import argparse
import yvs.shared as shared
from operator import itemgetter
from add_language import add_language


# Parses all command-line arguments
def parse_cli_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'language_id',
        metavar='code',
        help='the ISO 639-1 code of the language')
    return parser.parse_args()


def main():
    cli_args = parse_cli_args()
    bible = shared.get_bible_data(cli_args.language_id)
    default_version = bible['default_version']
    max_version_id = max(bible['versions'], key=itemgetter('id'))['id']
    add_language(
        cli_args.language_id,
        default_version,
        max_version_id)
    print('Support for {} has been successfully updated.'.format(
        cli_args.language_id.replace('_', '-')))


if __name__ == '__main__':
    main()
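A quick illustration of the max_version_id lookup above, with placeholder version entries (not from a real data file):

from operator import itemgetter

# Shaped like bible['versions']: pick the entry with the largest numeric id.
versions = [{'id': 1, 'name': 'A'}, {'id': 59, 'name': 'B'}, {'id': 12, 'name': 'C'}]
assert max(versions, key=itemgetter('id'))['id'] == 59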
|
Add utility for updating existing language file
|
Add utility for updating existing language file
|
Python
|
mit
|
caleb531/youversion-suggest,caleb531/youversion-suggest
|
b540a1ebc6b389c1be6cba2bb6b01836ad517d6c
|
py/replace-words.py
|
py/replace-words.py
|
from collections import defaultdict


class Node(object):
    def __init__(self):
        self.children = defaultdict(Node)
        self.end = False


class Solution(object):
    # Note: `dict` shadows the builtin here; the parameter name comes from
    # the LeetCode problem signature.
    def replaceWords(self, dict, sentence):
        """
        :type dict: List[str]
        :type sentence: str
        :rtype: str
        """
        # Build a trie of all dictionary roots.
        root = Node()
        for w in dict:
            cur = root
            for c in w:
                cur = cur.children[c]
            cur.end = True
        # Walk each word down the trie; replace it with the shortest root.
        ans = []
        for w in sentence.split():
            w_tmp = []
            cur = root
            for c in w:
                w_tmp.append(c)
                if c not in cur.children:
                    ans.append(w)
                    break
                else:
                    cur = cur.children[c]
                    if cur.end:
                        ans.append(''.join(w_tmp))
                        break
            else:
                ans.append(w)
        return ' '.join(ans)
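A quick sanity check using the sample from LeetCode 648:

# Each word that has a dictionary root is replaced by its shortest root.
assert Solution().replaceWords(
    ["cat", "bat", "rat"], "the cattle was rattled by the battery"
) == "the cat was rat by the bat"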
|
Add py solution for 648. Replace Words
|
Add py solution for 648. Replace Words
648. Replace Words: https://leetcode.com/problems/replace-words/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
baf7b88f61814e1db34375a71007648d79f14e1d
|
scripts/python/cleanSimulation.py
|
scripts/python/cleanSimulation.py
|
#!/usr/bin/env python

# file: cleanSimulation.py
# author: Olivier Mesnard (mesnardo@gwu.edu)
# description: Clean a PetIBM simulation.

import os
import argparse


def read_inputs():
    """Parses the command-line."""
    # create parser
    parser = argparse.ArgumentParser(
        description='Clean PetIBM case',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # fill parser with arguments
    parser.add_argument('--case', dest='case_directory', type=str,
                        default=os.getcwd(),
                        help='directory of the PetIBM simulation')
    parser.add_argument('--no-images', dest='images', action='store_false',
                        help='does not remove the images folder')
    parser.add_argument('--no-data', dest='data', action='store_false',
                        help='does not remove the data folder')
    parser.add_argument('--no-grid', dest='grid', action='store_false',
                        help='does not remove the grid file')
    parser.add_argument('--no-solutions', dest='solutions', action='store_false',
                        help='does not remove the numerical solution folders')
    parser.add_argument('--no-forces', dest='forces', action='store_false',
                        help='does not remove the forces data file')
    parser.add_argument('--no-logs', dest='logs', action='store_false',
                        help='does not remove log files '
                             '(iterationCount, performanceSummary)')
    parser.set_defaults(images=True, data=True, grid=True, solutions=True,
                        forces=True, logs=True)
    return parser.parse_args()


def main():
    """Cleans a PetIBM simulation."""
    # parse the command-line
    args = read_inputs()
    # gather the different parts to clean
    parts = {}
    if args.images:
        parts['images'] = '%s/images' % args.case_directory
    if args.data:
        parts['data'] = '%s/data' % args.case_directory
    if args.grid:
        parts['grid'] = '%s/grid.txt' % args.case_directory
    if args.solutions:
        parts['solutions'] = '%s/0*' % args.case_directory
    if args.forces:
        parts['forces'] = '%s/forces.txt' % args.case_directory
    if args.logs:
        parts['logs'] = ('%s/iterationCount.txt %s/performanceSummary.txt'
                         % (args.case_directory, args.case_directory))
    # remove the appropriate files/folders
    print '[case-directory] %s' % args.case_directory
    for key, part in parts.iteritems():
        print '\t--> removing %s ...' % key
        os.system('rm -rf %s' % part)


if __name__ == '__main__':
    main()
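A hypothetical invocation (the case path is an example, not from the repository); note the script shells out to rm -rf, and the 'logs' entry packs two paths into one string, which works only because os.system runs it through a shell:

# Example invocation of this Python 2 script:
#   python scripts/python/cleanSimulation.py --case runs/cylinderRe40 --no-forces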
|
Add script to clean a given PetIBM simulation
|
Add script to clean a given PetIBM simulation
|
Python
|
bsd-3-clause
|
mesnardo/PetIBM,barbagroup/PetIBM,mesnardo/PetIBM,mesnardo/PetIBM,piyueh/PetIBM
|
d408a48b126e270575c3d85a8f3748afa46d18a1
|
scripts/fixtures.py
|
scripts/fixtures.py
|
import datetime
import json

import requests

import kinto_http


def buildid2iso(buildid):
    return datetime.datetime.strptime(buildid, "%Y%m%d%H%M%S").isoformat()


def main():
    client = kinto_http.Client(server_url="https://kinto-ota.dev.mozaws.net/v1",
                               bucket="build-hub",
                               collection="fixtures",
                               auth=("user", "pass"))
    records = client.get_records(bucket="systemaddons", collection="versions")
    client.create_collection(if_not_exists=True)

    for record in records:
        buildid = record["release"]["buildId"]
        url = record["release"]["url"]
        resp = requests.head(url)
        size = int(resp.headers.get("Content-Length", 0))
        mimetype = resp.headers.get("Content-Type")

        addons_by_ids = {}
        for addon in record["builtins"]:
            addons_by_ids[addon["id"]] = {"id": addon["id"], "builtin": addon["version"]}
        for addon in record["updates"]:
            addons_by_ids.setdefault(addon["id"], {}).update({"id": addon["id"], "updated": addon["version"]})
        systemaddons = list(addons_by_ids.values())

        tree = {
            "beta": "mozilla-beta",
            "aurora": "mozilla-aurora",
            "release": "mozilla-release",
            "nightly": "mozilla-central"
        }[record["release"]["channel"]]

        fixture = {
            "build": {
                "id": buildid,
                "date": buildid2iso(buildid),
                "type": "opt"
            },
            "source": {
                "revision": record["id"],  # fake rev!
                "tree": tree,
                "product": "firefox",
            },
            "target": {
                "platform": record["release"]["target"],
                "locale": record["release"]["lang"],
                "version": record["release"]["version"],
                "channel": record["release"]["channel"],
            },
            "download": {
                "url": url,
                "mimetype": mimetype,
                "size": size,
            },
            "systemaddons": systemaddons
        }
        client.create_record(fixture)


if __name__ == "__main__":
    main()
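A quick check of the buildid2iso helper above; the build id is a synthetic but well-formed fourteen-digit value, not a real build:

assert buildid2iso("20170512114708") == "2017-05-12T11:47:08"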
|
Add quick and dirty script
|
Add quick and dirty script
|
Python
|
mpl-2.0
|
mozilla-services/buildhub,mozilla-services/buildhub,mozilla-services/buildhub,mozilla-services/buildhub
|
for addon in record["updates"]:
addons_by_ids.setdefault(addon["id"], {}).update({"id": addon["id"], "updated": addon["version"]})
systemaddons = list(addons_by_ids.values())
tree = {
"beta": "mozilla-beta",
"aurora": "mozilla-aurora",
"release": "mozilla-release",
"nightly": "mozilla-central"
}[record["release"]["channel"]]
fixture = {
"build": {
"id": buildid,
"date": buildid2iso(buildid),
"type": "opt"
},
"source": {
"revision": record["id"], # fake rev!
"tree": tree,
"product": "firefox",
},
"target": {
"platform": record["release"]["target"],
"locale": record["release"]["lang"],
"version": record["release"]["version"],
"channel": record["release"]["channel"],
},
"download": {
"url": url,
"mimetype": mimetype,
"size": size,
},
"systemaddons": systemaddons
}
client.create_record(fixture)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add quick and dirty script<commit_after>import datetime
import json
import requests
import kinto_http
def buildid2iso(buildid):
return datetime.datetime.strptime(buildid, "%Y%m%d%H%M%S").isoformat()
def main():
client = kinto_http.Client(server_url="https://kinto-ota.dev.mozaws.net/v1",
bucket="build-hub",
collection="fixtures",
auth=("user", "pass"))
records = client.get_records(bucket="systemaddons", collection="versions")
client.create_collection(if_not_exists=True)
for record in records:
buildid = record["release"]["buildId"]
url = record["release"]["url"]
resp = requests.head(url)
size = int(resp.headers.get("Content-Length", 0))
mimetype = resp.headers.get("Content-Type")
addons_by_ids = {}
for addon in record["builtins"]:
addons_by_ids[addon["id"]] = {"id": addon["id"], "builtin": addon["version"]}
for addon in record["updates"]:
addons_by_ids.setdefault(addon["id"], {}).update({"id": addon["id"], "updated": addon["version"]})
systemaddons = list(addons_by_ids.values())
tree = {
"beta": "mozilla-beta",
"aurora": "mozilla-aurora",
"release": "mozilla-release",
"nightly": "mozilla-central"
}[record["release"]["channel"]]
fixture = {
"build": {
"id": buildid,
"date": buildid2iso(buildid),
"type": "opt"
},
"source": {
"revision": record["id"], # fake rev!
"tree": tree,
"product": "firefox",
},
"target": {
"platform": record["release"]["target"],
"locale": record["release"]["lang"],
"version": record["release"]["version"],
"channel": record["release"]["channel"],
},
"download": {
"url": url,
"mimetype": mimetype,
"size": size,
},
"systemaddons": systemaddons
}
client.create_record(fixture)
if __name__ == "__main__":
main()
|
|
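For reference, the buildid2iso helper above turns a 14-digit build id into an ISO-8601 timestamp; a quick self-contained check (the build id value is illustrative):
import datetime
def buildid2iso(buildid):
    return datetime.datetime.strptime(buildid, "%Y%m%d%H%M%S").isoformat()
assert buildid2iso("20170512103000") == "2017-05-12T10:30:00"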
c94cb620292f93a4cd3cfc0bb57c5fa38d95a717
|
kafka/cluster.py
|
kafka/cluster.py
|
import logging
import random
from .conn import BrokerConnection, collect_hosts
from .protocol.metadata import MetadataRequest
logger = logging.getLogger(__name__)
class Cluster(object):
def __init__(self, **kwargs):
if 'bootstrap_servers' not in kwargs:
            kwargs['bootstrap_servers'] = 'localhost'
self._brokers = {}
self._topics = {}
self._groups = {}
self._bootstrap(collect_hosts(kwargs['bootstrap_servers']),
timeout=kwargs.get('bootstrap_timeout', 2))
def brokers(self):
brokers = list(self._brokers.values())
return random.sample(brokers, len(brokers))
def random_broker(self):
for broker in self.brokers():
if broker.connected() or broker.connect():
return broker
return None
def broker_by_id(self, broker_id):
return self._brokers.get(broker_id)
def topics(self):
return list(self._topics.keys())
def partitions_for_topic(self, topic):
if topic not in self._topics:
return None
return list(self._topics[topic].keys())
def broker_for_partition(self, topic, partition):
if topic not in self._topics or partition not in self._topics[topic]:
return None
broker_id = self._topics[topic][partition]
return self.broker_by_id(broker_id)
def refresh_metadata(self):
broker = self.random_broker()
if not broker.send(MetadataRequest([])):
return None
metadata = broker.recv()
if not metadata:
return None
self._update_metadata(metadata)
return metadata
def _update_metadata(self, metadata):
self._brokers.update({
node_id: BrokerConnection(host, port)
for node_id, host, port in metadata.brokers
if node_id not in self._brokers
})
self._topics = {
topic: {
partition: leader
for _, partition, leader, _, _ in partitions
}
for _, topic, partitions in metadata.topics
}
def _bootstrap(self, hosts, timeout=2):
for host, port in hosts:
conn = BrokerConnection(host, port, timeout)
if not conn.connect():
continue
self._brokers['bootstrap'] = conn
if self.refresh_metadata():
break
else:
raise ValueError("Could not bootstrap kafka cluster from %s" % hosts)
if len(self._brokers) > 1:
self._brokers.pop('bootstrap')
conn.close()
def __str__(self):
return 'Cluster(brokers: %d, topics: %d, groups: %d)' % \
(len(self._brokers), len(self._topics), len(self._groups))
|
Add simple Cluster class to manage broker metadata
|
Add simple Cluster class to manage broker metadata
|
Python
|
apache-2.0
|
Yelp/kafka-python,zackdever/kafka-python,wikimedia/operations-debs-python-kafka,DataDog/kafka-python,scrapinghub/kafka-python,dpkp/kafka-python,scrapinghub/kafka-python,ohmu/kafka-python,mumrah/kafka-python,dpkp/kafka-python,mumrah/kafka-python,gamechanger/kafka-python,zackdever/kafka-python,gamechanger/kafka-python,ohmu/kafka-python,Aloomaio/kafka-python,wikimedia/operations-debs-python-kafka,Yelp/kafka-python,Aloomaio/kafka-python
|
Add simple Cluster class to manage broker metadata
|
import logging
import random
from .conn import BrokerConnection, collect_hosts
from .protocol.metadata import MetadataRequest
logger = logging.getLogger(__name__)
class Cluster(object):
def __init__(self, **kwargs):
if 'bootstrap_servers' not in kwargs:
            kwargs['bootstrap_servers'] = 'localhost'
self._brokers = {}
self._topics = {}
self._groups = {}
self._bootstrap(collect_hosts(kwargs['bootstrap_servers']),
timeout=kwargs.get('bootstrap_timeout', 2))
def brokers(self):
brokers = list(self._brokers.values())
return random.sample(brokers, len(brokers))
def random_broker(self):
for broker in self.brokers():
if broker.connected() or broker.connect():
return broker
return None
def broker_by_id(self, broker_id):
return self._brokers.get(broker_id)
def topics(self):
return list(self._topics.keys())
def partitions_for_topic(self, topic):
if topic not in self._topics:
return None
return list(self._topics[topic].keys())
def broker_for_partition(self, topic, partition):
if topic not in self._topics or partition not in self._topics[topic]:
return None
broker_id = self._topics[topic][partition]
return self.broker_by_id(broker_id)
def refresh_metadata(self):
broker = self.random_broker()
if not broker.send(MetadataRequest([])):
return None
metadata = broker.recv()
if not metadata:
return None
self._update_metadata(metadata)
return metadata
def _update_metadata(self, metadata):
self._brokers.update({
node_id: BrokerConnection(host, port)
for node_id, host, port in metadata.brokers
if node_id not in self._brokers
})
self._topics = {
topic: {
partition: leader
for _, partition, leader, _, _ in partitions
}
for _, topic, partitions in metadata.topics
}
def _bootstrap(self, hosts, timeout=2):
for host, port in hosts:
conn = BrokerConnection(host, port, timeout)
if not conn.connect():
continue
self._brokers['bootstrap'] = conn
if self.refresh_metadata():
break
else:
raise ValueError("Could not bootstrap kafka cluster from %s" % hosts)
if len(self._brokers) > 1:
self._brokers.pop('bootstrap')
conn.close()
def __str__(self):
return 'Cluster(brokers: %d, topics: %d, groups: %d)' % \
(len(self._brokers), len(self._topics), len(self._groups))
|
<commit_before><commit_msg>Add simple Cluster class to manage broker metadata<commit_after>
|
import logging
import random
from .conn import BrokerConnection, collect_hosts
from .protocol.metadata import MetadataRequest
logger = logging.getLogger(__name__)
class Cluster(object):
def __init__(self, **kwargs):
if 'bootstrap_servers' not in kwargs:
            kwargs['bootstrap_servers'] = 'localhost'
self._brokers = {}
self._topics = {}
self._groups = {}
self._bootstrap(collect_hosts(kwargs['bootstrap_servers']),
timeout=kwargs.get('bootstrap_timeout', 2))
def brokers(self):
brokers = list(self._brokers.values())
return random.sample(brokers, len(brokers))
def random_broker(self):
for broker in self.brokers():
if broker.connected() or broker.connect():
return broker
return None
def broker_by_id(self, broker_id):
return self._brokers.get(broker_id)
def topics(self):
return list(self._topics.keys())
def partitions_for_topic(self, topic):
if topic not in self._topics:
return None
return list(self._topics[topic].keys())
def broker_for_partition(self, topic, partition):
if topic not in self._topics or partition not in self._topics[topic]:
return None
broker_id = self._topics[topic][partition]
return self.broker_by_id(broker_id)
def refresh_metadata(self):
broker = self.random_broker()
if not broker.send(MetadataRequest([])):
return None
metadata = broker.recv()
if not metadata:
return None
self._update_metadata(metadata)
return metadata
def _update_metadata(self, metadata):
self._brokers.update({
node_id: BrokerConnection(host, port)
for node_id, host, port in metadata.brokers
if node_id not in self._brokers
})
self._topics = {
topic: {
partition: leader
for _, partition, leader, _, _ in partitions
}
for _, topic, partitions in metadata.topics
}
def _bootstrap(self, hosts, timeout=2):
for host, port in hosts:
conn = BrokerConnection(host, port, timeout)
if not conn.connect():
continue
self._brokers['bootstrap'] = conn
if self.refresh_metadata():
break
else:
raise ValueError("Could not bootstrap kafka cluster from %s" % hosts)
if len(self._brokers) > 1:
self._brokers.pop('bootstrap')
conn.close()
def __str__(self):
return 'Cluster(brokers: %d, topics: %d, groups: %d)' % \
(len(self._brokers), len(self._topics), len(self._groups))
|
Add simple Cluster class to manage broker metadataimport logging
import random
from .conn import BrokerConnection, collect_hosts
from .protocol.metadata import MetadataRequest
logger = logging.getLogger(__name__)
class Cluster(object):
def __init__(self, **kwargs):
if 'bootstrap_servers' not in kwargs:
            kwargs['bootstrap_servers'] = 'localhost'
self._brokers = {}
self._topics = {}
self._groups = {}
self._bootstrap(collect_hosts(kwargs['bootstrap_servers']),
timeout=kwargs.get('bootstrap_timeout', 2))
def brokers(self):
brokers = list(self._brokers.values())
return random.sample(brokers, len(brokers))
def random_broker(self):
for broker in self.brokers():
if broker.connected() or broker.connect():
return broker
return None
def broker_by_id(self, broker_id):
return self._brokers.get(broker_id)
def topics(self):
return list(self._topics.keys())
def partitions_for_topic(self, topic):
if topic not in self._topics:
return None
return list(self._topics[topic].keys())
def broker_for_partition(self, topic, partition):
if topic not in self._topics or partition not in self._topics[topic]:
return None
broker_id = self._topics[topic][partition]
return self.broker_by_id(broker_id)
def refresh_metadata(self):
broker = self.random_broker()
if not broker.send(MetadataRequest([])):
return None
metadata = broker.recv()
if not metadata:
return None
self._update_metadata(metadata)
return metadata
def _update_metadata(self, metadata):
self._brokers.update({
node_id: BrokerConnection(host, port)
for node_id, host, port in metadata.brokers
if node_id not in self._brokers
})
self._topics = {
topic: {
partition: leader
for _, partition, leader, _, _ in partitions
}
for _, topic, partitions in metadata.topics
}
def _bootstrap(self, hosts, timeout=2):
for host, port in hosts:
conn = BrokerConnection(host, port, timeout)
if not conn.connect():
continue
self._brokers['bootstrap'] = conn
if self.refresh_metadata():
break
else:
raise ValueError("Could not bootstrap kafka cluster from %s" % hosts)
if len(self._brokers) > 1:
self._brokers.pop('bootstrap')
conn.close()
def __str__(self):
return 'Cluster(brokers: %d, topics: %d, groups: %d)' % \
(len(self._brokers), len(self._topics), len(self._groups))
|
<commit_before><commit_msg>Add simple Cluster class to manage broker metadata<commit_after>import logging
import random
from .conn import BrokerConnection, collect_hosts
from .protocol.metadata import MetadataRequest
logger = logging.getLogger(__name__)
class Cluster(object):
def __init__(self, **kwargs):
if 'bootstrap_servers' not in kwargs:
            kwargs['bootstrap_servers'] = 'localhost'
self._brokers = {}
self._topics = {}
self._groups = {}
self._bootstrap(collect_hosts(kwargs['bootstrap_servers']),
timeout=kwargs.get('bootstrap_timeout', 2))
def brokers(self):
brokers = list(self._brokers.values())
return random.sample(brokers, len(brokers))
def random_broker(self):
for broker in self.brokers():
if broker.connected() or broker.connect():
return broker
return None
def broker_by_id(self, broker_id):
return self._brokers.get(broker_id)
def topics(self):
return list(self._topics.keys())
def partitions_for_topic(self, topic):
if topic not in self._topics:
return None
return list(self._topics[topic].keys())
def broker_for_partition(self, topic, partition):
if topic not in self._topics or partition not in self._topics[topic]:
return None
broker_id = self._topics[topic][partition]
return self.broker_by_id(broker_id)
def refresh_metadata(self):
broker = self.random_broker()
if not broker.send(MetadataRequest([])):
return None
metadata = broker.recv()
if not metadata:
return None
self._update_metadata(metadata)
return metadata
def _update_metadata(self, metadata):
self._brokers.update({
node_id: BrokerConnection(host, port)
for node_id, host, port in metadata.brokers
if node_id not in self._brokers
})
self._topics = {
topic: {
partition: leader
for _, partition, leader, _, _ in partitions
}
for _, topic, partitions in metadata.topics
}
def _bootstrap(self, hosts, timeout=2):
for host, port in hosts:
conn = BrokerConnection(host, port, timeout)
if not conn.connect():
continue
self._brokers['bootstrap'] = conn
if self.refresh_metadata():
break
else:
raise ValueError("Could not bootstrap kafka cluster from %s" % hosts)
if len(self._brokers) > 1:
self._brokers.pop('bootstrap')
conn.close()
def __str__(self):
return 'Cluster(brokers: %d, topics: %d, groups: %d)' % \
(len(self._brokers), len(self._topics), len(self._groups))
|
|
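A minimal sketch of how the Cluster class above might be driven, assuming it lives at kafka.cluster as the file path suggests; the broker address is illustrative:
from kafka.cluster import Cluster
cluster = Cluster(bootstrap_servers='localhost:9092')
print(cluster)  # e.g. Cluster(brokers: 3, topics: 5, groups: 0)
for topic in cluster.topics():
    print(topic, cluster.partitions_for_topic(topic))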
03b4ffb6f0a2badc15b1312fcf8fe737e77bcf86
|
test-http-client.py
|
test-http-client.py
|
try:
import config_local as config
except ImportError:
    import config
parts = config.url.split('/')
url_base = parts[2]
url_path = '/'
if len(parts) > 3:
    url_path = '/' + '/'.join(parts[3:])
print(url_base, url_path)
import http.client
conn = http.client.HTTPConnection(url_base)
conn.request('GET', url_path)
response = conn.getresponse()
print(response.read())
|
Add test of Python http.client
|
Add test of Python http.client
|
Python
|
unlicense
|
mloskot/http-url-test,mloskot/http-url-test,mloskot/http-url-test,mloskot/http-url-test
|
Add test of Python http.client
|
try:
import config_local as config
except ImportError:
    import config
parts = config.url.split('/')
url_base = parts[2]
url_path = '/'
if len(parts) > 3:
    url_path = '/' + '/'.join(parts[3:])
print(url_base, url_path)
import http.client
conn = http.client.HTTPConnection(url_base)
conn.request('GET', url_path)
response = conn.getresponse()
print(response.read())
|
<commit_before><commit_msg>Add test of Python http.client<commit_after>
|
try:
import config_local as config
except ImportError:
    import config
parts = config.url.split('/')
url_base = parts[2]
url_path = '/'
if len(parts) > 3:
    url_path = '/' + '/'.join(parts[3:])
print(url_base, url_path)
import http.client
conn = http.client.HTTPConnection(url_base)
conn.request('GET', url_path)
response = conn.getresponse()
print(response.read())
|
Add test of Python http.clienttry:
import config_local as config
except ImportError:
    import config
parts = config.url.split('/')
url_base = parts[2]
url_path = '/'
if len(parts) > 3:
    url_path = '/' + '/'.join(parts[3:])
print(url_base, url_path)
import http.client
conn = http.client.HTTPConnection(url_base)
conn.request('GET', url_path)
response = conn.getresponse()
print(response.read())
|
<commit_before><commit_msg>Add test of Python http.client<commit_after>try:
import config_local as config
except ImportError:
    import config
parts = config.url.split('/')
url_base = parts[2]
url_path = '/'
if len(parts) > 3:
    url_path = '/' + '/'.join(parts[3:])
print(url_base, url_path)
import http.client
conn = http.client.HTTPConnection(url_base)
conn.request('GET', url_path)
response = conn.getresponse()
print(response.read())
|
|
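Hand-splitting the URL as above still ignores ports and query strings; urllib.parse covers those cases. A hedged alternative sketch (the URL is illustrative):
from urllib.parse import urlsplit
import http.client
parts = urlsplit('http://example.com/some/path')
conn = http.client.HTTPConnection(parts.netloc)
conn.request('GET', parts.path or '/')
print(conn.getresponse().read())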
eca7f6a35d3f95352096a455ace91ef5cde51339
|
terminus/tests/city_model_test.py
|
terminus/tests/city_model_test.py
|
import unittest
import mock
from geometry.point import Point
from models.street import Street
from models.trunk import Trunk
from models.ground_plane import GroundPlane
from models.building import Building
from models.block import Block
from models.city import City
class CityModelTest(unittest.TestCase):
def test_street(self):
street = Street()
generator_mock = mock.Mock()
street.accept(generator_mock)
calls = [mock.call.start_street(street), mock.call.end_street(street)]
generator_mock.assert_has_calls(calls)
def test_trunk(self):
trunk = Trunk()
generator_mock = mock.Mock()
trunk.accept(generator_mock)
calls = [mock.call.start_trunk(trunk), mock.call.end_trunk(trunk)]
generator_mock.assert_has_calls(calls)
def test_ground_plane(self):
plane = GroundPlane(1, Point(0, 0, 0))
generator_mock = mock.Mock()
plane.accept(generator_mock)
calls = [mock.call.start_ground_plane(plane), mock.call.end_ground_plane(plane)]
generator_mock.assert_has_calls(calls)
def test_building(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
building = Building(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
building.accept(generator_mock)
calls = [mock.call.start_building(building), mock.call.end_building(building)]
generator_mock.assert_has_calls(calls)
def test_block(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
block = Block(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
block.accept(generator_mock)
calls = [mock.call.start_block(block), mock.call.end_block(block)]
generator_mock.assert_has_calls(calls)
def test_city(self):
city = City()
road = mock.Mock()
ground_plane = mock.Mock()
building = mock.Mock()
block = mock.Mock()
city.add_road(road)
city.add_building(building)
city.set_ground_plane(ground_plane)
city.add_block(block)
generator_mock = mock.Mock()
city.accept(generator_mock)
city_calls = [mock.call.start_city(city), mock.call.end_city(city)]
generator_mock.assert_has_calls(city_calls)
road.accept.assert_called()
ground_plane.accept.assert_called()
building.accept.assert_called()
block.accept.assert_called()
|
Add tests for city generation.
|
Add tests for city generation.
|
Python
|
apache-2.0
|
ekumenlabs/terminus,ekumenlabs/terminus
|
Add tests for city generation.
|
import unittest
import mock
from geometry.point import Point
from models.street import Street
from models.trunk import Trunk
from models.ground_plane import GroundPlane
from models.building import Building
from models.block import Block
from models.city import City
class CityModelTest(unittest.TestCase):
def test_street(self):
street = Street()
generator_mock = mock.Mock()
street.accept(generator_mock)
calls = [mock.call.start_street(street), mock.call.end_street(street)]
generator_mock.assert_has_calls(calls)
def test_trunk(self):
trunk = Trunk()
generator_mock = mock.Mock()
trunk.accept(generator_mock)
calls = [mock.call.start_trunk(trunk), mock.call.end_trunk(trunk)]
generator_mock.assert_has_calls(calls)
def test_ground_plane(self):
plane = GroundPlane(1, Point(0, 0, 0))
generator_mock = mock.Mock()
plane.accept(generator_mock)
calls = [mock.call.start_ground_plane(plane), mock.call.end_ground_plane(plane)]
generator_mock.assert_has_calls(calls)
def test_building(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
building = Building(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
building.accept(generator_mock)
calls = [mock.call.start_building(building), mock.call.end_building(building)]
generator_mock.assert_has_calls(calls)
def test_block(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
block = Block(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
block.accept(generator_mock)
calls = [mock.call.start_block(block), mock.call.end_block(block)]
generator_mock.assert_has_calls(calls)
def test_city(self):
city = City()
road = mock.Mock()
ground_plane = mock.Mock()
building = mock.Mock()
block = mock.Mock()
city.add_road(road)
city.add_building(building)
city.set_ground_plane(ground_plane)
city.add_block(block)
generator_mock = mock.Mock()
city.accept(generator_mock)
city_calls = [mock.call.start_city(city), mock.call.end_city(city)]
generator_mock.assert_has_calls(city_calls)
road.accept.assert_called()
ground_plane.accept.assert_called()
building.accept.assert_called()
block.accept.assert_called()
|
<commit_before><commit_msg>Add tests for city generation.<commit_after>
|
import unittest
import mock
from geometry.point import Point
from models.street import Street
from models.trunk import Trunk
from models.ground_plane import GroundPlane
from models.building import Building
from models.block import Block
from models.city import City
class CityModelTest(unittest.TestCase):
def test_street(self):
street = Street()
generator_mock = mock.Mock()
street.accept(generator_mock)
calls = [mock.call.start_street(street), mock.call.end_street(street)]
generator_mock.assert_has_calls(calls)
def test_trunk(self):
trunk = Trunk()
generator_mock = mock.Mock()
trunk.accept(generator_mock)
calls = [mock.call.start_trunk(trunk), mock.call.end_trunk(trunk)]
generator_mock.assert_has_calls(calls)
def test_ground_plane(self):
plane = GroundPlane(1, Point(0, 0, 0))
generator_mock = mock.Mock()
plane.accept(generator_mock)
calls = [mock.call.start_ground_plane(plane), mock.call.end_ground_plane(plane)]
generator_mock.assert_has_calls(calls)
def test_building(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
building = Building(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
building.accept(generator_mock)
calls = [mock.call.start_building(building), mock.call.end_building(building)]
generator_mock.assert_has_calls(calls)
def test_block(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
block = Block(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
block.accept(generator_mock)
calls = [mock.call.start_block(block), mock.call.end_block(block)]
generator_mock.assert_has_calls(calls)
def test_city(self):
city = City()
road = mock.Mock()
ground_plane = mock.Mock()
building = mock.Mock()
block = mock.Mock()
city.add_road(road)
city.add_building(building)
city.set_ground_plane(ground_plane)
city.add_block(block)
generator_mock = mock.Mock()
city.accept(generator_mock)
city_calls = [mock.call.start_city(city), mock.call.end_city(city)]
generator_mock.assert_has_calls(city_calls)
road.accept.assert_called()
ground_plane.accept.assert_called()
building.accept.assert_called()
block.accept.assert_called()
|
Add tests for city generation.import unittest
import mock
from geometry.point import Point
from models.street import Street
from models.trunk import Trunk
from models.ground_plane import GroundPlane
from models.building import Building
from models.block import Block
from models.city import City
class CityModelTest(unittest.TestCase):
def test_street(self):
street = Street()
generator_mock = mock.Mock()
street.accept(generator_mock)
calls = [mock.call.start_street(street), mock.call.end_street(street)]
generator_mock.assert_has_calls(calls)
def test_trunk(self):
trunk = Trunk()
generator_mock = mock.Mock()
trunk.accept(generator_mock)
calls = [mock.call.start_trunk(trunk), mock.call.end_trunk(trunk)]
generator_mock.assert_has_calls(calls)
def test_ground_plane(self):
plane = GroundPlane(1, Point(0, 0, 0))
generator_mock = mock.Mock()
plane.accept(generator_mock)
calls = [mock.call.start_ground_plane(plane), mock.call.end_ground_plane(plane)]
generator_mock.assert_has_calls(calls)
def test_building(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
building = Building(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
building.accept(generator_mock)
calls = [mock.call.start_building(building), mock.call.end_building(building)]
generator_mock.assert_has_calls(calls)
def test_block(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
block = Block(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
block.accept(generator_mock)
calls = [mock.call.start_block(block), mock.call.end_block(block)]
generator_mock.assert_has_calls(calls)
def test_city(self):
city = City()
road = mock.Mock()
ground_plane = mock.Mock()
building = mock.Mock()
block = mock.Mock()
city.add_road(road)
city.add_building(building)
city.set_ground_plane(ground_plane)
city.add_block(block)
generator_mock = mock.Mock()
city.accept(generator_mock)
city_calls = [mock.call.start_city(city), mock.call.end_city(city)]
generator_mock.assert_has_calls(city_calls)
road.accept.assert_called()
ground_plane.accept.assert_called()
building.accept.assert_called()
block.accept.assert_called()
|
<commit_before><commit_msg>Add tests for city generation.<commit_after>import unittest
import mock
from geometry.point import Point
from models.street import Street
from models.trunk import Trunk
from models.ground_plane import GroundPlane
from models.building import Building
from models.block import Block
from models.city import City
class CityModelTest(unittest.TestCase):
def test_street(self):
street = Street()
generator_mock = mock.Mock()
street.accept(generator_mock)
calls = [mock.call.start_street(street), mock.call.end_street(street)]
generator_mock.assert_has_calls(calls)
def test_trunk(self):
trunk = Trunk()
generator_mock = mock.Mock()
trunk.accept(generator_mock)
calls = [mock.call.start_trunk(trunk), mock.call.end_trunk(trunk)]
generator_mock.assert_has_calls(calls)
def test_ground_plane(self):
plane = GroundPlane(1, Point(0, 0, 0))
generator_mock = mock.Mock()
plane.accept(generator_mock)
calls = [mock.call.start_ground_plane(plane), mock.call.end_ground_plane(plane)]
generator_mock.assert_has_calls(calls)
def test_building(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
building = Building(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
building.accept(generator_mock)
calls = [mock.call.start_building(building), mock.call.end_building(building)]
generator_mock.assert_has_calls(calls)
def test_block(self):
vertices = [Point(0, 0, 0), Point(0, 1, 0), Point(1, 0, 0)]
block = Block(Point(0, 0, 0), vertices)
generator_mock = mock.Mock()
block.accept(generator_mock)
calls = [mock.call.start_block(block), mock.call.end_block(block)]
generator_mock.assert_has_calls(calls)
def test_city(self):
city = City()
road = mock.Mock()
ground_plane = mock.Mock()
building = mock.Mock()
block = mock.Mock()
city.add_road(road)
city.add_building(building)
city.set_ground_plane(ground_plane)
city.add_block(block)
generator_mock = mock.Mock()
city.accept(generator_mock)
city_calls = [mock.call.start_city(city), mock.call.end_city(city)]
generator_mock.assert_has_calls(city_calls)
road.accept.assert_called()
ground_plane.accept.assert_called()
building.accept.assert_called()
block.accept.assert_called()
|
|
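The tests above imply a visitor protocol on the city models; a hypothetical accept implementation consistent with the asserted start/end call pairs (not the actual terminus source):
class Street(object):
    def accept(self, generator):
        # Paired callbacks, as the tests assert via mock.call.start_*/end_*.
        generator.start_street(self)
        generator.end_street(self)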
61d4ed5dff415350601d2850c0b7e23b3a377657
|
tests/pytests/unit/test_minion.py
|
tests/pytests/unit/test_minion.py
|
import salt.minion
from tests.support.mock import patch
def test_minion_grains_in_opts():
"""
Minion does not generate grains when they are already in opts
"""
opts = {"random_startup_delay": 0, "grains": {"foo": "bar"}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] == opts["grains"]
grainsfunc.assert_not_called()
def test_minion_grains_not_in_opts():
"""
Minion generates grains when they are not already in opts
"""
opts = {"random_startup_delay": 0, "grains": {}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] != {}
grainsfunc.assert_called()
|
Add tests for minion opts in grains
|
Add tests for minion opts in grains
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add tests for minion opts in grains
|
import salt.minion
from tests.support.mock import patch
def test_minion_grains_in_opts():
"""
Minion does not generate grains when they are already in opts
"""
opts = {"random_startup_delay": 0, "grains": {"foo": "bar"}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] == opts["grains"]
grainsfunc.assert_not_called()
def test_minion_grains_not_in_opts():
"""
Minion generates grains when they are not already in opts
"""
opts = {"random_startup_delay": 0, "grains": {}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] != {}
grainsfunc.assert_called()
|
<commit_before><commit_msg>Add tests for minion opts in grains<commit_after>
|
import salt.minion
from tests.support.mock import patch
def test_minion_grains_in_opts():
"""
Minion does not generate grains when they are already in opts
"""
opts = {"random_startup_delay": 0, "grains": {"foo": "bar"}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] == opts["grains"]
grainsfunc.assert_not_called()
def test_minion_grains_not_in_opts():
"""
Minion generates grains when they are not already in opts
"""
opts = {"random_startup_delay": 0, "grains": {}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] != {}
grainsfunc.assert_called()
|
Add tests for minion opts in grainsimport salt.minion
from tests.support.mock import patch
def test_minion_grains_in_opts():
"""
Minion does not generate grains when they are already in opts
"""
opts = {"random_startup_delay": 0, "grains": {"foo": "bar"}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] == opts["grains"]
grainsfunc.assert_not_called()
def test_minion_grains_not_in_opts():
"""
Minion generates grains when they are not already in opts
"""
opts = {"random_startup_delay": 0, "grains": {}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] != {}
grainsfunc.assert_called()
|
<commit_before><commit_msg>Add tests for minion opts in grains<commit_after>import salt.minion
from tests.support.mock import patch
def test_minion_grains_in_opts():
"""
Minion does not generate grains when they are already in opts
"""
opts = {"random_startup_delay": 0, "grains": {"foo": "bar"}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] == opts["grains"]
grainsfunc.assert_not_called()
def test_minion_grains_not_in_opts():
"""
Minion generates grains when they are not already in opts
"""
opts = {"random_startup_delay": 0, "grains": {}}
with patch("salt.loader.grains") as grainsfunc:
minion = salt.minion.Minion(opts)
assert minion.opts["grains"] != {}
grainsfunc.assert_called()
|
|
1acf010efb596b3ca8ab2cd714eee6d19505ccf6
|
scripts/showlog.py
|
scripts/showlog.py
|
#!/usr/bin/python3
import sys
import argparse
from kafka import KafkaConsumer
parser = argparse.ArgumentParser(description='Zoe Kafka log viewer')
parser.add_argument('kafka_address', help='Address of the Kafka broker')
parser.add_argument('--list-logs', action='store_true', help='List all the available service logs')
parser.add_argument('--topic', help='Service name to fetch and monitor for activity')
args = parser.parse_args()
consumer = KafkaConsumer(bootstrap_servers=args.kafka_address)
if args.list_logs:
for topic in consumer.topics():
if topic[0] != '_':
print(topic)
sys.exit(0)
consumer.subscribe(pattern=args.topic)
consumer.poll(1)
consumer.seek_to_beginning()
try:
for msg in consumer:
print(msg.value.decode('utf-8'))
except KeyboardInterrupt:
print('showlog exiting...')
|
Add a script to retrieve logs from kafka
|
Add a script to retrieve logs from kafka
|
Python
|
apache-2.0
|
DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe
|
Add a script to retrieve logs from kafka
|
#!/usr/bin/python3
import sys
import argparse
from kafka import KafkaConsumer
parser = argparse.ArgumentParser(description='Zoe Kafka log viewer')
parser.add_argument('kafka_address', help='Address of the Kafka broker')
parser.add_argument('--list-logs', action='store_true', help='List all the available service logs')
parser.add_argument('--topic', help='Service name to fetch and monitor for activity')
args = parser.parse_args()
consumer = KafkaConsumer(bootstrap_servers=args.kafka_address)
if args.list_logs:
for topic in consumer.topics():
if topic[0] != '_':
print(topic)
sys.exit(0)
consumer.subscribe(pattern=args.topic)
consumer.poll(1)
consumer.seek_to_beginning()
try:
for msg in consumer:
print(msg.value.decode('utf-8'))
except KeyboardInterrupt:
print('showlog exiting...')
|
<commit_before><commit_msg>Add a script to retrieve logs from kafka<commit_after>
|
#!/usr/bin/python3
import sys
import argparse
from kafka import KafkaConsumer
parser = argparse.ArgumentParser(description='Zoe Kafka log viewer')
parser.add_argument('kafka_address', help='Address of the Kafka broker')
parser.add_argument('--list-logs', action='store_true', help='List all the available service logs')
parser.add_argument('--topic', help='Service name to fetch and monitor for activity')
args = parser.parse_args()
consumer = KafkaConsumer(bootstrap_servers=args.kafka_address)
if args.list_logs:
for topic in consumer.topics():
if topic[0] != '_':
print(topic)
sys.exit(0)
consumer.subscribe(pattern=args.topic)
consumer.poll(1)
consumer.seek_to_beginning()
try:
for msg in consumer:
print(msg.value.decode('utf-8'))
except KeyboardInterrupt:
print('showlog exiting...')
|
Add a script to retrieve logs from kafka#!/usr/bin/python3
import sys
import argparse
from kafka import KafkaConsumer
parser = argparse.ArgumentParser(description='Zoe Kafka log viewer')
parser.add_argument('kafka_address', help='Address of the Kafka broker')
parser.add_argument('--list-logs', action='store_true', help='List all the available service logs')
parser.add_argument('--topic', help='Service name to fetch and monitor for activity')
args = parser.parse_args()
consumer = KafkaConsumer(bootstrap_servers=args.kafka_address)
if args.list_logs:
for topic in consumer.topics():
if topic[0] != '_':
print(topic)
sys.exit(0)
consumer.subscribe(pattern=args.topic)
consumer.poll(1)
consumer.seek_to_beginning()
try:
for msg in consumer:
print(msg.value.decode('utf-8'))
except KeyboardInterrupt:
print('showlog exiting...')
|
<commit_before><commit_msg>Add a script to retrieve logs from kafka<commit_after>#!/usr/bin/python3
import sys
import argparse
from kafka import KafkaConsumer
parser = argparse.ArgumentParser(description='Zoe Kafka log viewer')
parser.add_argument('kafka_address', help='Address of the Kafka broker')
parser.add_argument('--list-logs', action='store_true', help='List all the available service logs')
parser.add_argument('--topic', help='Service name to fetch and monitor for activity')
args = parser.parse_args()
consumer = KafkaConsumer(bootstrap_servers=args.kafka_address)
if args.list_logs:
for topic in consumer.topics():
if topic[0] != '_':
print(topic)
sys.exit(0)
consumer.subscribe(pattern=args.topic)
consumer.poll(1)
consumer.seek_to_beginning()
try:
for msg in consumer:
print(msg.value.decode('utf-8'))
except KeyboardInterrupt:
print('showlog exiting...')
|
|
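For completeness, a sketch of tailing one topic without the CLI wrapper above, using the same kafka-python consumer; broker address and topic name are illustrative:
from kafka import KafkaConsumer
consumer = KafkaConsumer('zoe-service-log',
                         bootstrap_servers='localhost:9092',
                         auto_offset_reset='earliest')
for msg in consumer:
    print(msg.value.decode('utf-8'))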
a66d15d95e7ec62da12ccade0894e78e8dba6673
|
cappa/factory.py
|
cappa/factory.py
|
from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
def manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return Pip
elif manager_key == 'pip3':
return Pip3
elif manager_key == 'pip_pypy':
return PipPypy
elif manager_key == 'sys':
return Apt
elif manager_key == 'npm':
return Npm
elif manager_key == 'npmg':
return NpmG
elif manager_key == 'bower':
return Bower
elif manager_key == 'tsd':
return Tsd
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
|
from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
MANAGER_MAP = {
'pip': Pip,
'pip3': Pip3,
'pip_pypy': PipPypy,
'sys': Apt,
'npm': Npm,
'npmg': NpmG,
'bower': Bower,
'tsd': Tsd
}
PRIVATE_MANAGER_MAP = {
'pip': PrivatePip
}
def manager_key_to_cappa(manager_key):
if manager_key in MANAGER_MAP:
return MANAGER_MAP[manager_key]
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key in PRIVATE_MANAGER_MAP:
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
|
Switch if-else block to dictionary lookup
|
Switch if-else block to dictionary lookup
|
Python
|
mit
|
Captricity/cappa,Captricity/cappa
|
from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
def manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return Pip
elif manager_key == 'pip3':
return Pip3
elif manager_key == 'pip_pypy':
return PipPypy
elif manager_key == 'sys':
return Apt
elif manager_key == 'npm':
return Npm
elif manager_key == 'npmg':
return NpmG
elif manager_key == 'bower':
return Bower
elif manager_key == 'tsd':
return Tsd
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
Switch if-else block to dictionary lookup
|
from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
MANAGER_MAP = {
'pip': Pip,
'pip3': Pip3,
'pip_pypy': PipPypy,
'sys': Apt,
'npm': Npm,
'npmg': NpmG,
'bower': Bower,
'tsd': Tsd
}
PRIVATE_MANAGER_MAP = {
'pip': PrivatePip
}
def manager_key_to_cappa(manager_key):
if manager_key in MANAGER_MAP:
return MANAGER_MAP[manager_key]
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key in PRIVATE_MANAGER_MAP:
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
|
<commit_before>from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
def manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return Pip
elif manager_key == 'pip3':
return Pip3
elif manager_key == 'pip_pypy':
return PipPypy
elif manager_key == 'sys':
return Apt
elif manager_key == 'npm':
return Npm
elif manager_key == 'npmg':
return NpmG
elif manager_key == 'bower':
return Bower
elif manager_key == 'tsd':
return Tsd
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
<commit_msg>Switch if-else block to dictionary lookup<commit_after>
|
from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
MANAGER_MAP = {
'pip': Pip,
'pip3': Pip3,
'pip_pypy': PipPypy,
'sys': Apt,
'npm': Npm,
'npmg': NpmG,
'bower': Bower,
'tsd': Tsd
}
PRIVATE_MANAGER_MAP = {
'pip': PrivatePip
}
def manager_key_to_cappa(manager_key):
if manager_key in MANAGER_MAP:
return MANAGER_MAP[manager_key]
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key in PRIVATE_MANAGER_MAP:
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
|
from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
def manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return Pip
elif manager_key == 'pip3':
return Pip3
elif manager_key == 'pip_pypy':
return PipPypy
elif manager_key == 'sys':
return Apt
elif manager_key == 'npm':
return Npm
elif manager_key == 'npmg':
return NpmG
elif manager_key == 'bower':
return Bower
elif manager_key == 'tsd':
return Tsd
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
Switch if-else block to dictionary lookupfrom __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
MANAGER_MAP = {
'pip': Pip,
'pip3': Pip3,
'pip_pypy': PipPypy,
'sys': Apt,
'npm': Npm,
'npmg': NpmG,
'bower': Bower,
'tsd': Tsd
}
PRIVATE_MANAGER_MAP = {
'pip': PrivatePip
}
def manager_key_to_cappa(manager_key):
if manager_key in MANAGER_MAP:
return MANAGER_MAP[manager_key]
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key in PRIVATE_MANAGER_MAP:
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
|
<commit_before>from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
def manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return Pip
elif manager_key == 'pip3':
return Pip3
elif manager_key == 'pip_pypy':
return PipPypy
elif manager_key == 'sys':
return Apt
elif manager_key == 'npm':
return Npm
elif manager_key == 'npmg':
return NpmG
elif manager_key == 'bower':
return Bower
elif manager_key == 'tsd':
return Tsd
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key == 'pip':
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
<commit_msg>Switch if-else block to dictionary lookup<commit_after>from __future__ import print_function, absolute_import
from .pip import Pip
from .pip3 import Pip3
from .pip_pypy import PipPypy
from .apt import Apt
from .bower import Bower
from .npm import Npm
from .npmg import NpmG
from .tsd import Tsd
from .private.pip import PrivatePip
MANAGER_MAP = {
'pip': Pip,
'pip3': Pip3,
'pip_pypy': PipPypy,
'sys': Apt,
'npm': Npm,
'npmg': NpmG,
'bower': Bower,
'tsd': Tsd
}
PRIVATE_MANAGER_MAP = {
'pip': PrivatePip
}
def manager_key_to_cappa(manager_key):
if manager_key in MANAGER_MAP:
return MANAGER_MAP[manager_key]
else:
raise UnknownManager('{} is not a supported manager.'.format(manager_key))
def private_manager_key_to_cappa(manager_key):
if manager_key in PRIVATE_MANAGER_MAP:
return PrivatePip
else:
raise UnknownManager('{} is not a supported private repo manager.'.format(manager_key))
|
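The dictionary-dispatch refactor above can be tightened once more by letting the lookup itself signal the failure; a sketch (note that the private variant above returns PrivatePip directly; looking it up in PRIVATE_MANAGER_MAP instead would keep both functions symmetric as entries are added):
def manager_key_to_cappa(manager_key):
    try:
        return MANAGER_MAP[manager_key]
    except KeyError:
        raise UnknownManager('{} is not a supported manager.'.format(manager_key))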
5193535b98513ae67a4d53b0314f1bd322aa2109
|
pyecore/javatransmap.py
|
pyecore/javatransmap.py
|
import datetime
# Must be completed
# tuple is '(implem_type, use_type_as_factory, default_value)'
javaTransMap = {
'int': (int, False, 0),
'boolean': (bool, False, False),
'byte': (int, False, 0),
'short': (int, False, 0),
'long': (int, False, 0),
'float': (float, False, 0.0),
'char': (str, False, ''),
'double': (float, False, 0.0),
'byte[]': (bytearray, True, None),
'java.lang.Integer': (int, False, None),
'java.lang.String': (str, False, None),
'java.lang.Character': (str, False, None),
'java.lang.Boolean': (bool, False, False),
'java.lang.Short': (int, False, None),
'java.lang.Long': (int, False, None),
'java.lang.Float': (float, False, None),
'java.lang.Double': (float, False, None),
'java.lang.Class': (type, False, None),
'java.lang.Byte': (int, False, None),
'java.lang.Object': (object, False, None),
'java.util.List': (list, True, None),
'java.util.Set': (set, True, None),
'java.util.Map': (dict, True, None),
'java.util.Map$Entry': (dict, True, None),
    'java.util.Date': (datetime.datetime, False, None),
'org.eclipse.emf.common.util.EList': (list, True, None),
'org.eclipse.emf.ecore.util.FeatureMap': (dict, True, None),
'org.eclipse.emf.ecore.util.FeatureMap$Entry': (dict, True, None)
}
|
Move JavaTransMap to another module
|
Move JavaTransMap to another module
|
Python
|
bsd-3-clause
|
pyecore/pyecore,aranega/pyecore
|
Move JavaTransMap to another module
|
import datetime
# Must be completed
# tuple is '(implem_type, use_type_as_factory, default_value)'
javaTransMap = {
'int': (int, False, 0),
'boolean': (bool, False, False),
'byte': (int, False, 0),
'short': (int, False, 0),
'long': (int, False, 0),
'float': (float, False, 0.0),
'char': (str, False, ''),
'double': (float, False, 0.0),
'byte[]': (bytearray, True, None),
'java.lang.Integer': (int, False, None),
'java.lang.String': (str, False, None),
'java.lang.Character': (str, False, None),
'java.lang.Boolean': (bool, False, False),
'java.lang.Short': (int, False, None),
'java.lang.Long': (int, False, None),
'java.lang.Float': (float, False, None),
'java.lang.Double': (float, False, None),
'java.lang.Class': (type, False, None),
'java.lang.Byte': (int, False, None),
'java.lang.Object': (object, False, None),
'java.util.List': (list, True, None),
'java.util.Set': (set, True, None),
'java.util.Map': (dict, True, None),
'java.util.Map$Entry': (dict, True, None),
    'java.util.Date': (datetime.datetime, False, None),
'org.eclipse.emf.common.util.EList': (list, True, None),
'org.eclipse.emf.ecore.util.FeatureMap': (dict, True, None),
'org.eclipse.emf.ecore.util.FeatureMap$Entry': (dict, True, None)
}
|
<commit_before><commit_msg>Move JavaTransMap to another module<commit_after>
|
import datetime
# Must be completed
# tuple is '(implem_type, use_type_as_factory, default_value)'
javaTransMap = {
'int': (int, False, 0),
'boolean': (bool, False, False),
'byte': (int, False, 0),
'short': (int, False, 0),
'long': (int, False, 0),
'float': (float, False, 0.0),
'char': (str, False, ''),
'double': (float, False, 0.0),
'byte[]': (bytearray, True, None),
'java.lang.Integer': (int, False, None),
'java.lang.String': (str, False, None),
'java.lang.Character': (str, False, None),
'java.lang.Boolean': (bool, False, False),
'java.lang.Short': (int, False, None),
'java.lang.Long': (int, False, None),
'java.lang.Float': (float, False, None),
'java.lang.Double': (float, False, None),
'java.lang.Class': (type, False, None),
'java.lang.Byte': (int, False, None),
'java.lang.Object': (object, False, None),
'java.util.List': (list, True, None),
'java.util.Set': (set, True, None),
'java.util.Map': (dict, True, None),
'java.util.Map$Entry': (dict, True, None),
    'java.util.Date': (datetime.datetime, False, None),
'org.eclipse.emf.common.util.EList': (list, True, None),
'org.eclipse.emf.ecore.util.FeatureMap': (dict, True, None),
'org.eclipse.emf.ecore.util.FeatureMap$Entry': (dict, True, None)
}
|
Move JavaTransMap to another moduleimport datetime
# Must be completed
# tuple is '(implem_type, use_type_as_factory, default_value)'
javaTransMap = {
'int': (int, False, 0),
'boolean': (bool, False, False),
'byte': (int, False, 0),
'short': (int, False, 0),
'long': (int, False, 0),
'float': (float, False, 0.0),
'char': (str, False, ''),
'double': (float, False, 0.0),
'byte[]': (bytearray, True, None),
'java.lang.Integer': (int, False, None),
'java.lang.String': (str, False, None),
'java.lang.Character': (str, False, None),
'java.lang.Boolean': (bool, False, False),
'java.lang.Short': (int, False, None),
'java.lang.Long': (int, False, None),
'java.lang.Float': (float, False, None),
'java.lang.Double': (float, False, None),
'java.lang.Class': (type, False, None),
'java.lang.Byte': (int, False, None),
'java.lang.Object': (object, False, None),
'java.util.List': (list, True, None),
'java.util.Set': (set, True, None),
'java.util.Map': (dict, True, None),
'java.util.Map$Entry': (dict, True, None),
    'java.util.Date': (datetime.datetime, False, None),
'org.eclipse.emf.common.util.EList': (list, True, None),
'org.eclipse.emf.ecore.util.FeatureMap': (dict, True, None),
'org.eclipse.emf.ecore.util.FeatureMap$Entry': (dict, True, None)
}
|
<commit_before><commit_msg>Move JavaTransMap to another module<commit_after>import datetime
# Must be completed
# tuple is '(implem_type, use_type_as_factory, default_value)'
javaTransMap = {
'int': (int, False, 0),
'boolean': (bool, False, False),
'byte': (int, False, 0),
'short': (int, False, 0),
'long': (int, False, 0),
'float': (float, False, 0.0),
'char': (str, False, ''),
'double': (float, False, 0.0),
'byte[]': (bytearray, True, None),
'java.lang.Integer': (int, False, None),
'java.lang.String': (str, False, None),
'java.lang.Character': (str, False, None),
'java.lang.Boolean': (bool, False, False),
'java.lang.Short': (int, False, None),
'java.lang.Long': (int, False, None),
'java.lang.Float': (float, False, None),
'java.lang.Double': (float, False, None),
'java.lang.Class': (type, False, None),
'java.lang.Byte': (int, False, None),
'java.lang.Object': (object, False, None),
'java.util.List': (list, True, None),
'java.util.Set': (set, True, None),
'java.util.Map': (dict, True, None),
'java.util.Map$Entry': (dict, True, None),
    'java.util.Date': (datetime.datetime, False, None),
'org.eclipse.emf.common.util.EList': (list, True, None),
'org.eclipse.emf.ecore.util.FeatureMap': (dict, True, None),
'org.eclipse.emf.ecore.util.FeatureMap$Entry': (dict, True, None)
}
|
|
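A sketch of how the mapping's tuples might be consumed, following the '(implem_type, use_type_as_factory, default_value)' convention stated in the file; the consuming function is hypothetical:
def default_for(java_type):
    implem_type, use_as_factory, default = javaTransMap[java_type]
    # Factory types are instantiated so each caller gets a fresh default.
    return implem_type() if use_as_factory else default
default_for('java.util.List')  # -> [] (a new list each call)
default_for('int')             # -> 0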
a7a6e2cd3dd23906a62a94a7c1f6f8d640ee3f0c
|
pylua/test/test_json.py
|
pylua/test/test_json.py
|
#!/usr/bin/env python2
#-*- coding: utf-8 -*-
import json
import logging
logger = logging.getLogger(__name__)
class TestJson:
_JSON_STRING = '''{
"IntParam": 10,
"StrParam": "TestString",
"ArrayParam": [1,2,3],
"ObjParam": {"ObjIntParam": 11, "ObjStrParam": "ObjTestString"}
}'''
_json_obj = None
def __init__(self):
pass
def setup(self):
logger.debug('Setup 1')
self._json_obj = json.loads(self._JSON_STRING)
assert self._json_obj is not None
assert isinstance(self._json_obj, dict)
def teardown(self):
logger.debug('Teardown')
@classmethod
def setup_class(self):
pass
@classmethod
def teardown_class(self):
pass
def test_int_param_py(self):
assert 'IntParam' in self._json_obj
value = self._json_obj['IntParam']
assert isinstance(value, int)
assert value == 10
def test_int_param_lua(self):
        assert False, 'Not implemented'
def test_string_param_py(self):
assert 'StrParam' in self._json_obj
value = self._json_obj['StrParam']
assert isinstance(value, basestring)
assert value == 'TestString'
def test_string_param_lua(self):
        assert False, 'Not implemented'
def test_array_param_py(self):
assert 'ArrayParam' in self._json_obj
value = self._json_obj['ArrayParam']
assert isinstance(value, list)
assert len(value) == 3
index = 1
for item in value:
assert index == item
index += 1
assert index == 4
def test_array_param_lua(self):
        assert False, 'Not implemented'
def test_obj_param_py(self):
assert 'ObjParam' in self._json_obj
value = self._json_obj['ObjParam']
assert isinstance(value, dict)
assert 'ObjIntParam' in value
param = value['ObjIntParam']
assert isinstance(param, int)
assert param == 11
assert 'ObjStrParam' in value
param = value['ObjStrParam']
assert isinstance(param, basestring)
assert param == 'ObjTestString'
def test_obj_param_lua(self):
        assert False, 'Not implemented'
|
Add test for json parsing. Lua part is not implemented.
|
Add test for json parsing. Lua part is not implemented.
|
Python
|
mit
|
malirod/pylua,malirod/pylua
|
Add test for json parsing. Lua part is not implemented.
|
#!/usr/bin/env python2
#-*- coding: utf-8 -*-
import json
import logging
logger = logging.getLogger(__name__)
class TestJson:
_JSON_STRING = '''{
"IntParam": 10,
"StrParam": "TestString",
"ArrayParam": [1,2,3],
"ObjParam": {"ObjIntParam": 11, "ObjStrParam": "ObjTestString"}
}'''
_json_obj = None
def __init__(self):
pass
def setup(self):
logger.debug('Setup 1')
self._json_obj = json.loads(self._JSON_STRING)
assert self._json_obj is not None
assert isinstance(self._json_obj, dict)
def teardown(self):
logger.debug('Teardown')
@classmethod
def setup_class(self):
pass
@classmethod
def teardown_class(self):
pass
def test_int_param_py(self):
assert 'IntParam' in self._json_obj
value = self._json_obj['IntParam']
assert isinstance(value, int)
assert value == 10
def test_int_param_lua(self):
assert False, 'Not implemented'
def test_string_param_py(self):
assert 'StrParam' in self._json_obj
value = self._json_obj['StrParam']
assert isinstance(value, basestring)
assert value == 'TestString'
def test_string_param_lua(self):
assert False, 'Not implemented'
def test_array_param_py(self):
assert 'ArrayParam' in self._json_obj
value = self._json_obj['ArrayParam']
assert isinstance(value, list)
assert len(value) == 3
index = 1
for item in value:
assert index == item
index += 1
assert index == 4
def test_array_param_lua(self):
assert False, 'Not implemented'
def test_obj_param_py(self):
assert 'ObjParam' in self._json_obj
value = self._json_obj['ObjParam']
assert isinstance(value, dict)
assert 'ObjIntParam' in value
param = value['ObjIntParam']
assert isinstance(param, int)
assert param == 11
assert 'ObjStrParam' in value
param = value['ObjStrParam']
assert isinstance(param, basestring)
assert param == 'ObjTestString'
def test_obj_param_lua(self):
assert False, 'Not implemented'
|
<commit_before><commit_msg>Add test for json parsing. Lua part is not implemented.<commit_after>
|
#!/usr/bin/env python2
#-*- coding: utf-8 -*-
import json
import logging
logger = logging.getLogger(__name__)
class TestJson:
_JSON_STRING = '''{
"IntParam": 10,
"StrParam": "TestString",
"ArrayParam": [1,2,3],
"ObjParam": {"ObjIntParam": 11, "ObjStrParam": "ObjTestString"}
}'''
_json_obj = None
def __init__(self):
pass
def setup(self):
logger.debug('Setup 1')
self._json_obj = json.loads(self._JSON_STRING)
assert self._json_obj is not None
assert isinstance(self._json_obj, dict)
def teardown(self):
logger.debug('Teardown')
@classmethod
def setup_class(self):
pass
@classmethod
def teardown_class(self):
pass
def test_int_param_py(self):
assert 'IntParam' in self._json_obj
value = self._json_obj['IntParam']
assert isinstance(value, int)
assert value == 10
def test_int_param_lua(self):
assert False, 'Not implemented'
def test_string_param_py(self):
assert 'StrParam' in self._json_obj
value = self._json_obj['StrParam']
assert isinstance(value, basestring)
assert value == 'TestString'
def test_string_param_lua(self):
assert False, 'Not implemented'
def test_array_param_py(self):
assert 'ArrayParam' in self._json_obj
value = self._json_obj['ArrayParam']
assert isinstance(value, list)
assert len(value) == 3
index = 1
for item in value:
assert index == item
index += 1
assert index == 4
def test_array_param_lua(self):
assert False, 'Not implemented'
def test_obj_param_py(self):
assert 'ObjParam' in self._json_obj
value = self._json_obj['ObjParam']
assert isinstance(value, dict)
assert 'ObjIntParam' in value
param = value['ObjIntParam']
assert isinstance(param, int)
assert param == 11
assert 'ObjStrParam' in value
param = value['ObjStrParam']
assert isinstance(param, basestring)
assert param == 'ObjTestString'
def test_obj_param_lua(self):
assert False, 'Not implemented'
|
Add test for json parsing. Lua part is not implemented.#!/usr/bin/env python2
#-*- coding: utf-8 -*-
import json
import logging
logger = logging.getLogger(__name__)
class TestJson:
_JSON_STRING = '''{
"IntParam": 10,
"StrParam": "TestString",
"ArrayParam": [1,2,3],
"ObjParam": {"ObjIntParam": 11, "ObjStrParam": "ObjTestString"}
}'''
_json_obj = None
def __init__(self):
pass
def setup(self):
logger.debug('Setup 1')
self._json_obj = json.loads(self._JSON_STRING)
assert self._json_obj is not None
assert isinstance(self._json_obj, dict)
def teardown(self):
logger.debug('Teardown')
@classmethod
def setup_class(self):
pass
@classmethod
def teardown_class(self):
pass
def test_int_param_py(self):
assert 'IntParam' in self._json_obj
value = self._json_obj['IntParam']
assert isinstance(value, int)
assert value == 10
def test_int_param_lua(self):
assert False, 'Not implemented'
def test_string_param_py(self):
assert 'StrParam' in self._json_obj
value = self._json_obj['StrParam']
assert isinstance(value, basestring)
assert value == 'TestString'
def test_string_param_lua(self):
assert False, 'Not implemented'
def test_array_param_py(self):
assert 'ArrayParam' in self._json_obj
value = self._json_obj['ArrayParam']
assert isinstance(value, list)
assert len(value) == 3
index = 1
for item in value:
assert index == item
index += 1
assert index == 4
def test_array_param_lua(self):
assert False, 'Not implemented'
def test_obj_param_py(self):
assert 'ObjParam' in self._json_obj
value = self._json_obj['ObjParam']
assert isinstance(value, dict)
assert 'ObjIntParam' in value
param = value['ObjIntParam']
assert isinstance(param, int)
assert param == 11
assert 'ObjStrParam' in value
param = value['ObjStrParam']
assert isinstance(param, basestring)
assert param == 'ObjTestString'
def test_obj_param_lua(self):
assert False, 'Not implemented'
|
<commit_before><commit_msg>Add test for json parsing. Lua part is not implemented.<commit_after>#!/usr/bin/env python2
#-*- coding: utf-8 -*-
import json
import logging
logger = logging.getLogger(__name__)
class TestJson:
_JSON_STRING = '''{
"IntParam": 10,
"StrParam": "TestString",
"ArrayParam": [1,2,3],
"ObjParam": {"ObjIntParam": 11, "ObjStrParam": "ObjTestString"}
}'''
_json_obj = None
def __init__(self):
pass
def setup(self):
logger.debug('Setup 1')
self._json_obj = json.loads(self._JSON_STRING)
assert self._json_obj is not None
assert isinstance(self._json_obj, dict)
def teardown(self):
logger.debug('Teardown')
@classmethod
def setup_class(self):
pass
@classmethod
def teardown_class(self):
pass
def test_int_param_py(self):
assert 'IntParam' in self._json_obj
value = self._json_obj['IntParam']
assert isinstance(value, int)
assert value == 10
def test_int_param_lua(self):
assert False, 'Not implemented'
def test_string_param_py(self):
assert 'StrParam' in self._json_obj
value = self._json_obj['StrParam']
assert isinstance(value, basestring)
assert value == 'TestString'
def test_string_param_lua(self):
assert False, 'Not implemented'
def test_array_param_py(self):
assert 'ArrayParam' in self._json_obj
value = self._json_obj['ArrayParam']
assert isinstance(value, list)
assert len(value) == 3
index = 1
for item in value:
assert index == item
index += 1
assert index == 4
def test_array_param_lua(self):
assert False, 'Not implemented'
def test_obj_param_py(self):
assert 'ObjParam' in self._json_obj
value = self._json_obj['ObjParam']
assert isinstance(value, dict)
assert 'ObjIntParam' in value
param = value['ObjIntParam']
assert isinstance(param, int)
assert param == 11
assert 'ObjStrParam' in value
param = value['ObjStrParam']
assert isinstance(param, basestring)
assert param == 'ObjTestString'
def test_obj_param_lua(self):
assert False, 'Not implemented'
|
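A minimal sketch of what one of the pending *_lua checks could look like, assuming the lupa package provides the Python-to-Lua bridge (the project's actual bridge is not shown in this record, so LuaRuntime here is an illustrative stand-in):

from lupa import LuaRuntime

# Build the fixture as a Lua table and read the fields back through the bridge.
lua = LuaRuntime(unpack_returned_tuples=True)
tbl = lua.eval('{ IntParam = 10, StrParam = "TestString" }')
assert tbl['IntParam'] == 10
assert tbl['StrParam'] == 'TestString'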
|
77673e488ee258d0d25c05e5ee3bf109bd2f8a35
|
skan/test/test_io.py
|
skan/test/test_io.py
|
import numpy as np
import pandas as pd
from skan import io
from skimage._shared._tempfile import temporary_file
def test_write_excel_tables():
num_sheets = np.random.randint(1, 4)
num_cols = np.random.randint(1, 5, size=num_sheets)
num_rows = np.random.randint(20, 40, size=num_sheets)
tables = []
for m, n in zip(num_rows, num_cols):
columns = [f'column{i}' for i in range(n)]
data = np.random.random((m, n))
tables.append(pd.DataFrame(data=data, columns=columns))
sheet_names = [f'sheet {i}' for i in range(num_sheets)]
kwargs = dict(zip(sheet_names, tables))
kwargs['config'] = {'image files': ['image1.tif', 'image2.tif'],
'image format': 'fei',
'threshold radius': 5e-8}
with temporary_file(suffix='.xlsx') as file:
io.write_excel(file, **kwargs)
tables_in = [pd.read_excel(file, sheetname=name)
for name in sheet_names]
config_in_df = pd.read_excel(file, sheetname='config')
config_in = dict(zip(config_in_df['parameters'],
config_in_df['values']))
for table, table_in in zip(tables, tables_in):
assert list(table.columns) == list(table_in.columns)
np.testing.assert_allclose(table_in.values, table.values)
for key, val in kwargs['config'].items():
assert str(val) == str(config_in[key])
|
Add testing for Excel writer
|
Add testing for Excel writer
|
Python
|
bsd-3-clause
|
jni/skan
|
Add testing for Excel writer
|
import numpy as np
import pandas as pd
from skan import io
from skimage._shared._tempfile import temporary_file
def test_write_excel_tables():
num_sheets = np.random.randint(1, 4)
num_cols = np.random.randint(1, 5, size=num_sheets)
num_rows = np.random.randint(20, 40, size=num_sheets)
tables = []
for m, n in zip(num_rows, num_cols):
columns = [f'column{i}' for i in range(n)]
data = np.random.random((m, n))
tables.append(pd.DataFrame(data=data, columns=columns))
sheet_names = [f'sheet {i}' for i in range(num_sheets)]
kwargs = dict(zip(sheet_names, tables))
kwargs['config'] = {'image files': ['image1.tif', 'image2.tif'],
'image format': 'fei',
'threshold radius': 5e-8}
with temporary_file(suffix='.xlsx') as file:
io.write_excel(file, **kwargs)
tables_in = [pd.read_excel(file, sheetname=name)
for name in sheet_names]
config_in_df = pd.read_excel(file, sheetname='config')
config_in = dict(zip(config_in_df['parameters'],
config_in_df['values']))
for table, table_in in zip(tables, tables_in):
assert list(table.columns) == list(table_in.columns)
np.testing.assert_allclose(table_in.values, table.values)
for key, val in kwargs['config'].items():
assert str(val) == str(config_in[key])
|
<commit_before><commit_msg>Add testing for Excel writer<commit_after>
|
import numpy as np
import pandas as pd
from skan import io
from skimage._shared._tempfile import temporary_file
def test_write_excel_tables():
num_sheets = np.random.randint(1, 4)
num_cols = np.random.randint(1, 5, size=num_sheets)
num_rows = np.random.randint(20, 40, size=num_sheets)
tables = []
for m, n in zip(num_rows, num_cols):
columns = [f'column{i}' for i in range(n)]
data = np.random.random((m, n))
tables.append(pd.DataFrame(data=data, columns=columns))
sheet_names = [f'sheet {i}' for i in range(num_sheets)]
kwargs = dict(zip(sheet_names, tables))
kwargs['config'] = {'image files': ['image1.tif', 'image2.tif'],
'image format': 'fei',
'threshold radius': 5e-8}
with temporary_file(suffix='.xlsx') as file:
io.write_excel(file, **kwargs)
tables_in = [pd.read_excel(file, sheetname=name)
for name in sheet_names]
config_in_df = pd.read_excel(file, sheetname='config')
config_in = dict(zip(config_in_df['parameters'],
config_in_df['values']))
for table, table_in in zip(tables, tables_in):
assert list(table.columns) == list(table_in.columns)
np.testing.assert_allclose(table_in.values, table.values)
for key, val in kwargs['config'].items():
assert str(val) == str(config_in[key])
|
Add testing for Excel writerimport numpy as np
import pandas as pd
from skan import io
from skimage._shared._tempfile import temporary_file
def test_write_excel_tables():
num_sheets = np.random.randint(1, 4)
num_cols = np.random.randint(1, 5, size=num_sheets)
num_rows = np.random.randint(20, 40, size=num_sheets)
tables = []
for m, n in zip(num_rows, num_cols):
columns = [f'column{i}' for i in range(n)]
data = np.random.random((m, n))
tables.append(pd.DataFrame(data=data, columns=columns))
sheet_names = [f'sheet {i}' for i in range(num_sheets)]
kwargs = dict(zip(sheet_names, tables))
kwargs['config'] = {'image files': ['image1.tif', 'image2.tif'],
'image format': 'fei',
'threshold radius': 5e-8}
with temporary_file(suffix='.xlsx') as file:
io.write_excel(file, **kwargs)
tables_in = [pd.read_excel(file, sheetname=name)
for name in sheet_names]
config_in_df = pd.read_excel(file, sheetname='config')
config_in = dict(zip(config_in_df['parameters'],
config_in_df['values']))
for table, table_in in zip(tables, tables_in):
assert list(table.columns) == list(table_in.columns)
np.testing.assert_allclose(table_in.values, table.values)
for key, val in kwargs['config'].items():
assert str(val) == str(config_in[key])
|
<commit_before><commit_msg>Add testing for Excel writer<commit_after>import numpy as np
import pandas as pd
from skan import io
from skimage._shared._tempfile import temporary_file
def test_write_excel_tables():
num_sheets = np.random.randint(1, 4)
num_cols = np.random.randint(1, 5, size=num_sheets)
num_rows = np.random.randint(20, 40, size=num_sheets)
tables = []
for m, n in zip(num_rows, num_cols):
columns = [f'column{i}' for i in range(n)]
data = np.random.random((m, n))
tables.append(pd.DataFrame(data=data, columns=columns))
sheet_names = [f'sheet {i}' for i in range(num_sheets)]
kwargs = dict(zip(sheet_names, tables))
kwargs['config'] = {'image files': ['image1.tif', 'image2.tif'],
'image format': 'fei',
'threshold radius': 5e-8}
with temporary_file(suffix='.xlsx') as file:
io.write_excel(file, **kwargs)
tables_in = [pd.read_excel(file, sheetname=name)
for name in sheet_names]
config_in_df = pd.read_excel(file, sheetname='config')
config_in = dict(zip(config_in_df['parameters'],
config_in_df['values']))
for table, table_in in zip(tables, tables_in):
assert list(table.columns) == list(table_in.columns)
np.testing.assert_allclose(table_in.values, table.values)
for key, val in kwargs['config'].items():
assert str(val) == str(config_in[key])
|
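A usage sketch inferred from the round-trip in the test above; the exact write_excel signature in skan may differ, and 'out.xlsx' is just an illustrative path:

import pandas as pd
from skan import io

# One DataFrame per named sheet, plus a 'config' dict that lands on its own
# sheet as parameter/value columns (per the read-back in the test).
df = pd.DataFrame({'column0': [0.1, 0.2, 0.3]})
io.write_excel('out.xlsx', **{'sheet 0': df,
                              'config': {'threshold radius': 5e-8}})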
|
0ce3e5c87a473c0c861ca7d871a423c0982bdff9
|
utils/list_docs_modified_since.py
|
utils/list_docs_modified_since.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from datetime import datetime
from dateutil.parser import parse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(
description='List documents in a collection modified since a given date.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('since_date', help="Script will list docs updated since midnight on this date, GMT. Format YYYY-MM-DD", type=valid_date)
parser.add_argument(
'--pynuxrc',
default='~/.pynuxrc',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
action='store_true',
help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch docs for path {}".format(dh.path)
objects = dh.fetch_objects()
component_count = 0
for obj in objects:
last_mod_str = obj['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
components = dh.fetch_components(obj)
for c in components:
last_mod_str = c['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
def valid_date(string):
try:
return datetime.strptime(string, '%Y-%m-%d')
except ValueError:
msg = "Not a valid date: '{}'.".format(string)
raise argparse.ArgumentTypeError(msg)
if __name__ == "__main__":
sys.exit(main())
|
Add script for listing docs modified since date
|
Add script for listing docs modified since date
|
Python
|
bsd-3-clause
|
barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere
|
Add script for listing docs modified since date
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from datetime import datetime
from dateutil.parser import parse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(
description='List documents in a collection modified since a given date.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('since_date', help="Script will list docs updated since midnight on this date, GMT. Format YYYY-MM-DD", type=valid_date)
parser.add_argument(
'--pynuxrc',
default='~/.pynuxrc',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
action='store_true',
help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch docs for path {}".format(dh.path)
objects = dh.fetch_objects()
component_count = 0
for obj in objects:
last_mod_str = obj['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
components = dh.fetch_components(obj)
for c in components:
last_mod_str = c['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
def valid_date(string):
try:
return datetime.strptime(string, '%Y-%m-%d')
except ValueError:
msg = "Not a valid date: '{}'.".format(string)
raise argparse.ArgumentTypeError(msg)
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add script for listing docs modified since date<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from datetime import datetime
from dateutil.parser import parse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(
description='List documents in a collection modified since a given date.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('since_date', help="Script will list docs updated since midnight on this date, GMT. Format YYYY-MM-DD", type=valid_date)
parser.add_argument(
'--pynuxrc',
default='~/.pynuxrc',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
action='store_true',
help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch docs for path {}".format(dh.path)
objects = dh.fetch_objects()
component_count = 0
for obj in objects:
last_mod_str = obj['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
components = dh.fetch_components(obj)
for c in components:
last_mod_str = c['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
def valid_date(string):
try:
return datetime.strptime(string, '%Y-%m-%d')
except ValueError:
msg = "Not a valid date: '{}'.".format(string)
raise argparse.ArgumentTypeError(msg)
if __name__ == "__main__":
sys.exit(main())
|
Add script for listing docs modified since date#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from datetime import datetime
from dateutil.parser import parse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(
description='List documents in a collection modified since a given date.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('since_date', help="Script will list docs updated since midnight on this date, GMT. Format YYYY-MM-DD", type=valid_date)
parser.add_argument(
'--pynuxrc',
default='~/.pynuxrc',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
action='store_true',
help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch docs for path {}".format(dh.path)
objects = dh.fetch_objects()
component_count = 0
for obj in objects:
last_mod_str = obj['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
components = dh.fetch_components(obj)
for c in components:
last_mod_str = c['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
def valid_date(string):
try:
return datetime.strptime(string, '%Y-%m-%d')
except ValueError:
msg = "Not a valid date: '{}'.".format(string)
raise argparse.ArgumentTypeError(msg)
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add script for listing docs modified since date<commit_after>#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import argparse
from datetime import datetime
from dateutil.parser import parse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(
description='List documents in a collection modified since a given date.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('since_date', help="Script will list docs updated since midnight on this date, GMT. Format YYYY-MM-DD", type=valid_date)
parser.add_argument(
'--pynuxrc',
default='~/.pynuxrc',
help="rcfile for use with pynux utils")
parser.add_argument(
'--components',
action='store_true',
help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch docs for path {}".format(dh.path)
objects = dh.fetch_objects()
component_count = 0
for obj in objects:
last_mod_str = obj['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
components = dh.fetch_components(obj)
for c in components:
last_mod_str = c['lastModified'][:10]
last_mod_date = parse(last_mod_str)
if last_mod_date > argv.since_date:
print last_mod_str, obj['path']
'''
def valid_date(string):
try:
return datetime.strptime(string, '%Y-%m-%d')
except ValueError:
msg = "Not a valid date: '{}'.".format(string)
raise argparse.ArgumentTypeError(msg)
if __name__ == "__main__":
sys.exit(main())
|
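The type=valid_date pattern used above, isolated as a self-contained snippet:

import argparse
from datetime import datetime

def valid_date(string):
    try:
        return datetime.strptime(string, '%Y-%m-%d')
    except ValueError:
        raise argparse.ArgumentTypeError("Not a valid date: '{}'.".format(string))

parser = argparse.ArgumentParser()
parser.add_argument('since_date', type=valid_date)
args = parser.parse_args(['2017-01-01'])
print(args.since_date)  # datetime.datetime(2017, 1, 1, 0, 0)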
|
91318f3fc3138878ac2dba9fe668b84c4921e1df
|
bin/check_fasta.py
|
bin/check_fasta.py
|
"""
Check a fasta file to make sure it is correct
"""
import os
import sys
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Check the integrity of a fasta file")
parser.add_argument('-f', help='fasta file to check', required=True)
parser.add_argument('-o', help='output file to write', required=True)
parser.add_argument('-s', help='strip new lines from sequence (default: keep new lines)', action="store_true")
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
seqid = None
seq = ""
with open(args.f, 'r') as fin:
with open(args.o, 'w') as out:
for l in fin:
l = l.strip()
if not l:
continue
if l.startswith(">"):
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
seqid = l + "\n"
seq = ""
continue
if ">" in l:
# line contains some sequence and then a new header line
tmpseq = l[:l.index('>')]
tmpid = l[l.index('>'):]
seq += tmpseq + "\n"
out.write(seqid)
out.write(seq)
seqid = tmpid + "\n"
seq = ""
continue
if args.s:
seq += l
else:
seq += l + "\n"
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
|
Check and rewrite a fasta file
|
Check and rewrite a fasta file
|
Python
|
mit
|
linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab
|
Check and rewrite a fasta file
|
"""
Check a fasta file to make sure it is correct
"""
import os
import sys
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Check the integrity of a fasta file")
parser.add_argument('-f', help='fasta file to check', required=True)
parser.add_argument('-o', help='output file to write', required=True)
parser.add_argument('-s', help='strip new lines from sequence (default: keep new lines)', action="store_true")
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
seqid = None
seq = ""
with open(args.f, 'r') as fin:
with open(args.o, 'w') as out:
for l in fin:
l = l.strip()
if not l:
continue
if l.startswith(">"):
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
seqid = l + "\n"
seq = ""
continue
if ">" in l:
# line contains some sequence and then a new header line
tmpseq = l[:l.index('>')]
tmpid = l[l.index('>'):]
seq += tmpseq + "\n"
out.write(seqid)
out.write(seq)
seqid = tmpid + "\n"
seq = ""
continue
if args.s:
seq += l
else:
seq += l + "\n"
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
|
<commit_before><commit_msg>Check and rewrite a fasta file<commit_after>
|
"""
Check a fasta file to make sure it is correct
"""
import os
import sys
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Check the integrity of a fasta file")
parser.add_argument('-f', help='fasta file to check', required=True)
parser.add_argument('-o', help='output file to write', required=True)
parser.add_argument('-s', help='strip new lines from sequence (default: keep new lines)', action="store_true")
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
seqid = None
seq = ""
with open(args.f, 'r') as fin:
with open(args.o, 'w') as out:
for l in fin:
l = l.strip()
if not l:
continue
if l.startswith(">"):
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
seqid = l + "\n"
seq = ""
continue
if ">" in l:
# line contains some sequence and then a new header line
tmpseq = l[:l.index('>')]
tmpid = l[l.index('>'):]
seq += tmpseq + "\n"
out.write(seqid)
out.write(seq)
seqid = tmpid + "\n"
seq = ""
continue
if args.s:
seq += l
else:
seq += l + "\n"
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
|
Check and rewrite a fasta file"""
Check a fasta file to make sure it is correct
"""
import os
import sys
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Check the integrity of a fasta file")
parser.add_argument('-f', help='fasta file to check', required=True)
parser.add_argument('-o', help='output file to write', required=True)
parser.add_argument('-s', help='strip new lines from sequence (default: keep new lines)', action="store_true")
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
seqid = None
seq = ""
with open(args.f, 'r') as fin:
with open(args.o, 'w') as out:
for l in fin:
l = l.strip()
if not l:
continue
if l.startswith(">"):
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
seqid = l + "\n"
seq = ""
continue
if ">" in l:
# line contains some sequence and then a new header line
tmpseq = l[:l.index('>')]
tmpid = l[l.index('>'):]
seq += tmpseq + "\n"
out.write(seqid)
out.write(seq)
seqid = tmpid + "\n"
seq = ""
continue
if args.s:
seq += l
else:
seq += l + "\n"
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
|
<commit_before><commit_msg>Check and rewrite a fasta file<commit_after>"""
Check a fasta file to make sure it is correct
"""
import os
import sys
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Check the integrity of a fasta file")
parser.add_argument('-f', help='fasta file to check', required=True)
parser.add_argument('-o', help='output file to write', required=True)
parser.add_argument('-s', help='strip new lines from sequence (default: keep new lines)', action="store_true")
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
seqid = None
seq = ""
with open(args.f, 'r') as fin:
with open(args.o, 'w') as out:
for l in fin:
l = l.strip()
if not l:
continue
if l.startswith(">"):
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
seqid = l + "\n"
seq = ""
continue
if ">" in l:
# line contains some sequence and then a new header line
tmpseq = l[:l.index('>')]
tmpid = l[l.index('>'):]
seq += tmpseq + "\n"
out.write(seqid)
out.write(seq)
seqid = tmpid + "\n"
seq = ""
continue
if args.s:
seq += l
else:
seq += l + "\n"
if seqid:
if seq:
out.write(seqid)
out.write(seq)
elif args.v:
sys.stderr.write("There is no sequence for {}. Skipped\n".format(seqid))
|
|
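An illustration of the malformed record that the mid-line '>' branch repairs. Given this input:

>seq1
ACGT>seq2
TTTT

the script writes:

>seq1
ACGT
>seq2
TTTT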
599f093ba30afbf169f21559ca247eaba99dcebf
|
samples/custom/forms.py
|
samples/custom/forms.py
|
from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
|
from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
attrs={'class': 'inline'},
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
|
Add inline class to YesNoIgnored field
|
:memo: Add inline class to YesNoIgnored field
|
Python
|
mit
|
gcrsaldanha/fiocruz,gems-uff/labsys,gcrsaldanha/fiocruz,gems-uff/labsys,gems-uff/labsys
|
from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
:memo: Add inline class to YesNoIgnored field
|
from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
attrs={'class': 'inline'},
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
|
<commit_before>from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
<commit_msg>:memo: Add inline class to YesNoIgnored field<commit_after>
|
from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
attrs={'class': 'inline'},
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
|
from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
:memo: Add inline class to YesNoIgnored fieldfrom django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
attrs={'class': 'inline'},
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
|
<commit_before>from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
<commit_msg>:memo: Add inline class to YesNoIgnored field<commit_after>from django import forms
class YesNoIgnoredField(forms.NullBooleanField):
widget = forms.widgets.RadioSelect(
attrs={'class': 'inline'},
choices=(
(True, "Sim"), (False, "Não"), (None, "Ignorado"),
),
)
|
aa21d5360469b5976f82e55d28a5b920e459c3c1
|
scripts/syntax_check.py
|
scripts/syntax_check.py
|
#!/usr/bin/env python
"""
Run lint checks for Synapse.
Requires pycodestyle to be installed.
Forked from https://gist.github.com/810399
Updated from https://github.com/cbrueffer/pep8-git-hook
"""
from __future__ import print_function
import os
import subprocess
import sys
# don't fill in both of these
# good codes
select_codes = ["E111", "E101",
"E201", "E202", "E203", "E221", "E222", "E223", "E224", "E225",
"E226", "E227", "E228", "E231", "E241", "E242", "E251",
"E303", "E304",
"E502",
"E711", "E712", "E713", "E714", "E721",
"E741", "E742", "E743",
"W191",
"W291", "W293", "W292",
"W391",
"W602", "W603",
]
ignore_codes = []
# Add things like "--max-line-length=120" below
overrides = ["--max-line-length=120",
'--format=pylint',
]
def system(*args, **kwargs):
kwargs.setdefault('stdout', subprocess.PIPE)
proc = subprocess.Popen(args, **kwargs)
out, err = proc.communicate()
return out
def main():
args = ['pycodestyle']
if select_codes and ignore_codes:
print('Error: select and ignore codes are mutually exclusive')
sys.exit(1)
elif select_codes:
args.extend(('--select', ','.join(select_codes)))
elif ignore_codes:
args.extend(('--ignore', ','.join(ignore_codes)))
args.extend(overrides)
args.append('.')
output = system(*args)
if output:
print('PEP8 style violations have been detected.\n')
print(output.decode("utf-8"),)
sys.exit(1)
sys.exit(0)
if __name__ == '__main__':
main()
|
Add a syntax check script.
|
Add a syntax check script.
|
Python
|
apache-2.0
|
vertexproject/synapse,vertexproject/synapse,vivisect/synapse,vertexproject/synapse
|
Add a syntax check script.
|
#!/usr/bin/env python
"""
Run lint checks for Synapse.
Requires pycodestyle to be installed.
Forked from https://gist.github.com/810399
Updated from https://github.com/cbrueffer/pep8-git-hook
"""
from __future__ import print_function
import os
import subprocess
import sys
# don't fill in both of these
# good codes
select_codes = ["E111", "E101",
"E201", "E202", "E203", "E221", "E222", "E223", "E224", "E225",
"E226", "E227", "E228", "E231", "E241", "E242", "E251",
"E303", "E304",
"E502",
"E711", "E712", "E713", "E714", "E721",
"E741", "E742", "E743",
"W191",
"W291", "W293", "W292",
"W391",
"W602", "W603",
]
ignore_codes = []
# Add things like "--max-line-length=120" below
overrides = ["--max-line-length=120",
'--format=pylint',
]
def system(*args, **kwargs):
kwargs.setdefault('stdout', subprocess.PIPE)
proc = subprocess.Popen(args, **kwargs)
out, err = proc.communicate()
return out
def main():
args = ['pycodestyle']
if select_codes and ignore_codes:
print('Error: select and ignore codes are mutually exclusive')
sys.exit(1)
elif select_codes:
args.extend(('--select', ','.join(select_codes)))
elif ignore_codes:
args.extend(('--ignore', ','.join(ignore_codes)))
args.extend(overrides)
args.append('.')
output = system(*args)
if output:
print('PEP8 style violations have been detected.\n')
print(output.decode("utf-8"),)
sys.exit(1)
sys.exit(0)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a syntax check script.<commit_after>
|
#!/usr/bin/env python
"""
Run lint checks for Synapse.
Requires pycodestyle to be installed.
Forked from https://gist.github.com/810399
Updated from https://github.com/cbrueffer/pep8-git-hook
"""
from __future__ import print_function
import os
import subprocess
import sys
# don't fill in both of these
# good codes
select_codes = ["E111", "E101",
"E201", "E202", "E203", "E221", "E222", "E223", "E224", "E225",
"E226", "E227", "E228", "E231", "E241", "E242", "E251",
"E303", "E304",
"E502",
"E711", "E712", "E713", "E714", "E721",
"E741", "E742", "E743",
"W191",
"W291", "W293", "W292",
"W391",
"W602", "W603",
]
ignore_codes = []
# Add things like "--max-line-length=120" below
overrides = ["--max-line-length=120",
'--format=pylint',
]
def system(*args, **kwargs):
kwargs.setdefault('stdout', subprocess.PIPE)
proc = subprocess.Popen(args, **kwargs)
out, err = proc.communicate()
return out
def main():
args = ['pycodestyle']
if select_codes and ignore_codes:
print('Error: select and ignore codes are mutually exclusive')
sys.exit(1)
elif select_codes:
args.extend(('--select', ','.join(select_codes)))
elif ignore_codes:
args.extend(('--ignore', ','.join(ignore_codes)))
args.extend(overrides)
args.append('.')
output = system(*args)
if output:
print('PEP8 style violations have been detected.\n')
print(output.decode("utf-8"),)
sys.exit(1)
sys.exit(0)
if __name__ == '__main__':
main()
|
Add a syntax check script.#!/usr/bin/env python
"""
Run lint checks for Synapse.
Requires pycodestyle to be installed.
Forked from https://gist.github.com/810399
Updated from https://github.com/cbrueffer/pep8-git-hook
"""
from __future__ import print_function
import os
import subprocess
import sys
# don't fill in both of these
# good codes
select_codes = ["E111", "E101",
"E201", "E202", "E203", "E221", "E222", "E223", "E224", "E225",
"E226", "E227", "E228", "E231", "E241", "E242", "E251",
"E303", "E304",
"E502",
"E711", "E712", "E713", "E714", "E721",
"E741", "E742", "E743",
"W191",
"W291", "W293", "W292",
"W391",
"W602", "W603",
]
ignore_codes = []
# Add things like "--max-line-length=120" below
overrides = ["--max-line-length=120",
'--format=pylint',
]
def system(*args, **kwargs):
kwargs.setdefault('stdout', subprocess.PIPE)
proc = subprocess.Popen(args, **kwargs)
out, err = proc.communicate()
return out
def main():
args = ['pycodestyle']
if select_codes and ignore_codes:
print('Error: select and ignore codes are mutually exclusive')
sys.exit(1)
elif select_codes:
args.extend(('--select', ','.join(select_codes)))
elif ignore_codes:
args.extend(('--ignore', ','.join(ignore_codes)))
args.extend(overrides)
args.append('.')
output = system(*args)
if output:
print('PEP8 style violations have been detected.\n')
print(output.decode("utf-8"),)
sys.exit(1)
sys.exit(0)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a syntax check script.<commit_after>#!/usr/bin/env python
"""
Run lint checks for Synapse.
Requires pycodestyle to be installed.
Forked from https://gist.github.com/810399
Updated from https://github.com/cbrueffer/pep8-git-hook
"""
from __future__ import print_function
import os
import subprocess
import sys
# don't fill in both of these
# good codes
select_codes = ["E111", "E101",
"E201", "E202", "E203", "E221", "E222", "E223", "E224", "E225",
"E226", "E227", "E228", "E231", "E241", "E242", "E251",
"E303", "E304",
"E502",
"E711", "E712", "E713", "E714", "E721",
"E741", "E742", "E743",
"W191",
"W291", "W293", "W292",
"W391",
"W602", "W603",
]
ignore_codes = []
# Add things like "--max-line-length=120" below
overrides = ["--max-line-length=120",
'--format=pylint',
]
def system(*args, **kwargs):
kwargs.setdefault('stdout', subprocess.PIPE)
proc = subprocess.Popen(args, **kwargs)
out, err = proc.communicate()
return out
def main():
args = ['pycodestyle']
if select_codes and ignore_codes:
print('Error: select and ignore codes are mutually exclusive')
sys.exit(1)
elif select_codes:
args.extend(('--select', ','.join(select_codes)))
elif ignore_codes:
args.extend(('--ignore', ','.join(ignore_codes)))
args.extend(overrides)
args.append('.')
output = system(*args)
if output:
print('PEP8 style violations have been detected.\n')
print(output.decode("utf-8"),)
sys.exit(1)
sys.exit(0)
if __name__ == '__main__':
main()
|
|
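With the defaults above, the script is roughly equivalent to this direct invocation (the code list is abbreviated here):

pycodestyle --select E111,E101,...,W602,W603 --max-line-length=120 --format=pylint .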
1e6a633d09d0e5f6a5a44f24b12216e464f3f8b6
|
mygpo/data/models.py
|
mygpo/data/models.py
|
from datetime import datetime
from django.db import models
from mygpo.podcasts.models import Podcast
class PodcastUpdateResult(models.Model):
""" Results of a podcast update
Once an instance is stored, the update is assumed to be finished. """
# The podcast that was updated
podcast = models.ForeignKey(Podcast, on_delete=models.CASCADE)
# The timestamp at which the update started to be executed
start = models.DateTimeField(default=datetime.utcnow)
# The duration of the update
duration = models.DurationField()
# A flag indicating whether the update was successful
successful = models.BooleanField()
# An error message. Should be empty if the update was successful
error_message = models.TextField()
# A flag indicating whether the update created the podcast
podcast_created = models.BooleanField()
# The number of episodes that were created by the update
episodes_added = models.IntegerField()
class Meta(object):
get_latest_by = 'start'
order_with_respect_to = 'podcast'
ordering = ['-start']
indexes = [
models.Index(fields=['podcast', 'start'])
]
|
Create model for Podcast update results
|
Create model for Podcast update results
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
Create model for Podcast update results
|
from datetime import datetime
from django.db import models
from mygpo.podcasts.models import Podcast
class PodcastUpdateResult(models.Model):
""" Results of a podcast update
Once an instance is stored, the update is assumed to be finished. """
# The podcast that was updated
podcast = models.ForeignKey(Podcast, on_delete=models.CASCADE)
# The timestamp at which the update started to be executed
start = models.DateTimeField(default=datetime.utcnow)
# The duration of the update
duration = models.DurationField()
# A flag indicating whether the update was successful
successful = models.BooleanField()
# An error message. Should be empty if the update was successful
error_message = models.TextField()
# A flag indicating whether the update created the podcast
podcast_created = models.BooleanField()
# The number of episodes that were created by the update
episodes_added = models.IntegerField()
class Meta(object):
get_latest_by = 'start'
order_with_respect_to = 'podcast'
ordering = ['-start']
indexes = [
models.Index(fields=['podcast', 'start'])
]
|
<commit_before><commit_msg>Create model for Podcast update results<commit_after>
|
from datetime import datetime
from django.db import models
from mygpo.podcasts.models import Podcast
class PodcastUpdateResult(models.Model):
""" Results of a podcast update
Once an instance is stored, the update is assumed to be finished. """
# The podcast that was updated
podcast = models.ForeignKey(Podcast, on_delete=models.CASCADE)
# The timestamp at which the update started to be executed
start = models.DateTimeField(default=datetime.utcnow)
# The duration of the update
duration = models.DurationField()
# A flag indicating whether the update was successful
successful = models.BooleanField()
# An error message. Should be empty if the update was successful
error_message = models.TextField()
# A flag indicating whether the update created the podcast
podcast_created = models.BooleanField()
# The number of episodes that were created by the update
episodes_added = models.IntegerField()
class Meta(object):
get_latest_by = 'start'
order_with_respect_to = 'podcast'
ordering = ['-start']
indexes = [
models.Index(fields=['podcast', 'start'])
]
|
Create model for Podcast update resultsfrom datetime import datetime
from django.db import models
from mygpo.podcasts.models import Podcast
class PodcastUpdateResult(models.Model):
""" Results of a podcast update
Once an instance is stored, the update is assumed to be finished. """
# The podcast that was updated
podcast = models.ForeignKey(Podcast, on_delete=models.CASCADE)
# The timestamp at which the update started to be executed
start = models.DateTimeField(default=datetime.utcnow)
# The duration of the update
duration = models.DurationField()
# A flag indicating whether the update was successful
successful = models.BooleanField()
# An error message. Should be empty if the update was successful
error_message = models.TextField()
# A flag indicating whether the update created the podcast
podcast_created = models.BooleanField()
# The number of episodes that were created by the update
episodes_added = models.IntegerField()
class Meta(object):
get_latest_by = 'start'
order_with_respect_to = 'podcast'
ordering = ['-start']
indexes = [
models.Index(fields=['podcast', 'start'])
]
|
<commit_before><commit_msg>Create model for Podcast update results<commit_after>from datetime import datetime
from django.db import models
from mygpo.podcasts.models import Podcast
class PodcastUpdateResult(models.Model):
""" Results of a podcast update
Once an instance is stored, the update is assumed to be finished. """
# The podcast that was updated
podcast = models.ForeignKey(Podcast, on_delete=models.CASCADE)
# The timestamp at which the update started to be executed
start = models.DateTimeField(default=datetime.utcnow)
# The duration of the update
duration = models.DurationField()
# A flag indicating whether the update was successful
successful = models.BooleanField()
# An error message. Should be empty if the update was successful
error_message = models.TextField()
# A flag indicating whether the update created the podcast
podcast_created = models.BooleanField()
# The number of episodes that were created by the update
episodes_added = models.IntegerField()
class Meta(object):
get_latest_by = 'start'
order_with_respect_to = 'podcast'
ordering = ['-start']
indexes = [
models.Index(fields=['podcast', 'start'])
]
|
|
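A hedged sketch of recording one update with this model; `podcast` is assumed to be an existing Podcast instance:

from datetime import timedelta

from mygpo.data.models import PodcastUpdateResult

PodcastUpdateResult.objects.create(
    podcast=podcast,           # an existing Podcast (assumed)
    duration=timedelta(seconds=12),
    successful=True,
    error_message='',
    podcast_created=False,
    episodes_added=3,
)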
f8fc40540d84c89810581ac33953b5fc0c408800
|
oblique-curl-list.py
|
oblique-curl-list.py
|
##[TBT-Tools]=group
##Input_Footprints=vector
##Output_Curl_List= file
import os, sys, time
outList = Output_Curl_List
if len(outList) == 0:
outDir = os.environ['HOME']
outList = os.path.join(outDir,'curl.list')
from qgis.utils import *
# Get vector from canvas, must be loaded with features selected.
layer = processing.getObject(Input_Footprints)
selected = layer.selectedFeatures()
numImages = len(selected)
progress.setText('Adding %s images to %s'%(numImages,outList))
if numImages == 0:
progress.setText( 'No images selected for download!' )
progress.setText('Please select some images and try again!')
else:
out = open(outList,'w')
for sf in selected:
imgName = sf.attribute('url')
thisTif = str(imgName)
thisVrt = imgName.replace('.tif','.vrt')
progress.setText('Adding: %s '%imgName)
outTxt = 'url=%s\n-O\nurl=%s\n-O\n' %(thisTif,thisVrt)
out.write(outTxt)
out.close()
progress.setText('List complete!')
cmdTxt = 'You can now execute "curl -K %s" to download the files' % outList
progress.setText(cmdTxt)
time.sleep(5)
|
Add script to generate curl list
|
Add script to generate curl list
|
Python
|
mit
|
eyeNsky/qgis-scripts
|
Add script to generate curl list
|
##[TBT-Tools]=group
##Input_Footprints=vector
##Output_Curl_List= file
import os, sys, time
outList = Output_Curl_List
if len(outList) == 0:
outDir = os.environ['HOME']
outList = os.path.join(outDir,'curl.list')
from qgis.utils import *
# Get vector from canvas, must be loaded with features selected.
layer = processing.getObject(Input_Footprints)
selected = layer.selectedFeatures()
numImages = len(selected)
progress.setText('Adding %s images to %s'%(numImages,outList))
if numImages == 0:
progress.setText( 'No images selected for download!' )
progress.setText('Please select some images and try again!')
else:
out = open(outList,'w')
for sf in selected:
imgName = sf.attribute('url')
thisTif = str(imgName)
thisVrt = imgName.replace('.tif','.vrt')
progress.setText('Adding: %s '%imgName)
outTxt = 'url=%s\n-O\nurl=%s\n-O\n' %(thisTif,thisVrt)
out.write(outTxt)
out.close()
progress.setText('List complete!')
cmdTxt = 'You can now execute "curl -K %s" to download the files' % outList
progress.setText(cmdTxt)
time.sleep(5)
|
<commit_before><commit_msg>Add script to generate curl list<commit_after>
|
##[TBT-Tools]=group
##Input_Footprints=vector
##Output_Curl_List= file
import os, sys, time
outList = Output_Curl_List
if len(outList) == 0:
outDir = os.environ['HOME']
outList = os.path.join(outDir,'curl.list')
from qgis.utils import *
# Get vector from canvas, must be loaded with features selected.
layer = processing.getObject(Input_Footprints)
selected = layer.selectedFeatures()
numImages = len(selected)
progress.setText('Adding %s images to %s'%(numImages,outList))
if numImages == 0:
progress.setText( 'No images selected for download!' )
progress.setText('Please select some images and try again!')
else:
out = open(outList,'w')
for sf in selected:
imgName = sf.attribute('url')
thisTif = str(imgName)
thisVrt = imgName.replace('.tif','.vrt')
progress.setText('Adding: %s '%imgName)
outTxt = 'url=%s\n-O\nurl=%s\n-O\n' %(thisTif,thisVrt)
out.write(outTxt)
out.close()
progress.setText('List complete!')
cmdTxt = 'You can now execute "curl -K %s" to download the files' % outList
progress.setText(cmdTxt)
time.sleep(5)
|
Add script to generate curl list##[TBT-Tools]=group
##Input_Footprints=vector
##Output_Curl_List= file
import os, sys, time
outList = Output_Curl_List
if len(outList) == 0:
outDir = os.environ['HOME']
outList = os.path.join(outDir,'curl.list')
from qgis.utils import *
# Get vector from canvas, must be loaded with features selected.
layer = processing.getObject(Input_Footprints)
selected = layer.selectedFeatures()
numImages = len(selected)
progress.setText('Adding %s images to %s'%(numImages,outList))
if numImages == 0:
progress.setText( 'No images selected for download!' )
progress.setText('Please select some images and try again!')
else:
out = open(outList,'w')
for sf in selected:
imgName = sf.attribute('url')
thisTif = str(imgName)
thisVrt = imgName.replace('.tif','.vrt')
progress.setText('Adding: %s '%imgName)
outTxt = 'url=%s\n-O\nurl=%s\n-O\n' %(thisTif,thisVrt)
out.write(outTxt)
out.close()
progress.setText('List complete!')
cmdTxt = 'You can now execute "curl -K %s" to download the files' % outList
progress.setText(cmdTxt)
time.sleep(5)
|
<commit_before><commit_msg>Add script to generate curl list<commit_after>##[TBT-Tools]=group
##Input_Footprints=vector
##Output_Curl_List= file
import os, sys, time
outList = Output_Curl_List
if len(outList) == 0:
outDir = os.environ['HOME']
outList = os.path.join(outDir,'curl.list')
from qgis.utils import *
# Get vector from canvas, must be loaded with features selected.
layer = processing.getObject(Input_Footprints)
selected = layer.selectedFeatures()
numImages = len(selected)
progress.setText('Adding %s images to %s'%(numImages,outList))
if numImages == 0:
progress.setText( 'No images selected for download!' )
progress.setText('Please select some images and try again!')
else:
out = open(outList,'w')
for sf in selected:
imgName = sf.attribute('url')
thisTif = str(imgName)
thisVrt = imgName.replace('.tif','.vrt')
progress.setText('Adding: %s '%imgName)
outTxt = 'url=%s\n-O\nurl=%s\n-O\n' %(thisTif,thisVrt)
out.write(outTxt)
out.close()
progress.setText('List complete!')
cmdTxt = 'You can now execute "curl -K %s" to download the files' % outList
progress.setText(cmdTxt)
time.sleep(5)
|
|
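A sample of the list the script emits (the URLs here are illustrative, not from a real collection), followed by the suggested download step:

url=https://hostname.example/imagery/tile_042.tif
-O
url=https://hostname.example/imagery/tile_042.vrt
-O

curl -K ~/curl.list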
a819ff1266b205414ea09fc1ac1c88ad1f8e427b
|
profiling/get_du.py
|
profiling/get_du.py
|
import sys
sys.path.append('..')
import numpy as np
import pdb
import scipy.optimize
import floq.systems.spins as spins
import floq.evolution as ev
import floq.optimization_task as ot
import floq.optimizer as opt
import floq.fidelity as fid
ncomp = 5
n = 1
freqs = 1.1*np.ones(n)+0.05*np.random.rand(n)
amps = 1.0*np.ones(n)+0.05*np.random.rand(n)
s = spins.SpinEnsemble(n, ncomp, 1.5, freqs, amps)
controls = 0.5*np.ones(2*ncomp)
systems = s.get_systems(controls, 1.5)
ev.evolve_system_with_derivatives(systems[0])
|
Add single spin dU as profiling problem
|
Add single spin dU as profiling problem
|
Python
|
mit
|
sirmarcel/floq
|
Add single spin dU as profiling problem
|
import sys
sys.path.append('..')
import numpy as np
import pdb
import scipy.optimize
import floq.systems.spins as spins
import floq.evolution as ev
import floq.optimization_task as ot
import floq.optimizer as opt
import floq.fidelity as fid
ncomp = 5
n = 1
freqs = 1.1*np.ones(n)+0.05*np.random.rand(n)
amps = 1.0*np.ones(n)+0.05*np.random.rand(n)
s = spins.SpinEnsemble(n, ncomp, 1.5, freqs, amps)
controls = 0.5*np.ones(2*ncomp)
systems = s.get_systems(controls, 1.5)
ev.evolve_system_with_derivatives(systems[0])
|
<commit_before><commit_msg>Add single spin dU as profiling problem<commit_after>
|
import sys
sys.path.append('..')
import numpy as np
import pdb
import scipy.optimize
import floq.systems.spins as spins
import floq.evolution as ev
import floq.optimization_task as ot
import floq.optimizer as opt
import floq.fidelity as fid
ncomp = 5
n = 1
freqs = 1.1*np.ones(n)+0.05*np.random.rand(n)
amps = 1.0*np.ones(n)+0.05*np.random.rand(n)
s = spins.SpinEnsemble(n, ncomp, 1.5, freqs, amps)
controls = 0.5*np.ones(2*ncomp)
systems = s.get_systems(controls, 1.5)
ev.evolve_system_with_derivatives(systems[0])
|
Add single spin dU as profiling problemimport sys
sys.path.append('..')
import numpy as np
import pdb
import scipy.optimize
import floq.systems.spins as spins
import floq.evolution as ev
import floq.optimization_task as ot
import floq.optimizer as opt
import floq.fidelity as fid
ncomp = 5
n = 1
freqs = 1.1*np.ones(n)+0.05*np.random.rand(n)
amps = 1.0*np.ones(n)+0.05*np.random.rand(n)
s = spins.SpinEnsemble(n, ncomp, 1.5, freqs, amps)
controls = 0.5*np.ones(2*ncomp)
systems = s.get_systems(controls, 1.5)
ev.evolve_system_with_derivatives(systems[0])
|
<commit_before><commit_msg>Add single spin dU as profiling problem<commit_after>import sys
sys.path.append('..')
import numpy as np
import pdb
import scipy.optimize
import floq.systems.spins as spins
import floq.evolution as ev
import floq.optimization_task as ot
import floq.optimizer as opt
import floq.fidelity as fid
ncomp = 5
n = 1
freqs = 1.1*np.ones(n)+0.05*np.random.rand(n)
amps = 1.0*np.ones(n)+0.05*np.random.rand(n)
s = spins.SpinEnsemble(n, ncomp, 1.5, freqs, amps)
controls = 0.5*np.ones(2*ncomp)
systems = s.get_systems(controls, 1.5)
ev.evolve_system_with_derivatives(systems[0])
|
|
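One way to time this profiling problem, assuming it is run from the profiling/ directory so the sys.path tweak resolves:

python -m cProfile -s cumtime get_du.py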
a28a117d3b0a2f3fe6684b82a08d2b4e277314f5
|
src/auditlog_tests/test_settings.py
|
src/auditlog_tests/test_settings.py
|
"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_test',
}
}
|
"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests',
}
}
|
Fix typo in test settings
|
Fix typo in test settings
|
Python
|
mit
|
chris-griffin/django-auditlog,jjkester/django-auditlog,rauleb/django-auditlog,Zmeylol/auditlog,johnrtipton/django-auditlog,robmagee/django-auditlog,kbussell/django-auditlog
|
"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_test',
}
}
Fix typo in test settings
|
"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests',
}
}
|
<commit_before>"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_test',
}
}
<commit_msg>Fix typo in test settings<commit_after>
|
"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests',
}
}
|
"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_test',
}
}
Fix typo in test settings"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests',
}
}
|
<commit_before>"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_test',
}
}
<commit_msg>Fix typo in test settings<commit_after>"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests',
}
}
|
3e960bc09caa616be6f9e98f9a36d91b1b094d5e
|
examples/uart/uart_driver.py
|
examples/uart/uart_driver.py
|
import serial
import sys
def main(infile, outfile, usb_path):
print (f"Running with infile={infile}, outfile={outfile}")
with open(infile, "rb") as f:
data = f.read()
print(f"infile length={len(data)}")
ser = serial.Serial(usb_path, 115200)
ser.write(data)
res = ser.read(len(data))
ser.close()
with open(outfile, "wb") as f:
f.write(res)
def usage():
print(f"usage: {sys.argv[0]} infile outfile usb_path")
if __name__ == "__main__":
if len(sys.argv) < 4:
usage()
exit(1)
infile = sys.argv[1]
outfile = sys.argv[2]
usb_path = sys.argv[3]
main(infile, outfile, usb_path)
|
Add python driver for uart example
|
Add python driver for uart example
|
Python
|
mit
|
phanrahan/magmathon,phanrahan/magmathon
|
Add python driver for uart example
|
import serial
import sys
def main(infile, outfile, usb_path):
print (f"Running with infile={infile}, outfile={outfile}")
with open(infile, "rb") as f:
data = f.read()
print(f"infile length={len(data)}")
ser = serial.Serial(usb_path, 115200)
ser.write(data)
res = ser.read(len(data))
ser.close()
with open(outfile, "wb") as f:
f.write(res)
def usage():
print(f"usage: {sys.argv[0]} infile outfile usb_path")
if __name__ == "__main__":
if len(sys.argv) < 4:
usage()
exit(1)
infile = sys.argv[1]
outfile = sys.argv[2]
usb_path = sys.argv[3]
main(infile, outfile, usb_path)
|
<commit_before><commit_msg>Add python driver for uart example<commit_after>
|
import serial
import sys
def main(infile, outfile, usb_path):
print (f"Running with infile={infile}, outfile={outfile}")
with open(infile, "rb") as f:
data = f.read()
print(f"infile length={len(data)}")
ser = serial.Serial(usb_path, 115200)
ser.write(data)
res = ser.read(len(data))
ser.close()
with open(outfile, "wb") as f:
f.write(res)
def usage():
print(f"usage: {sys.argv[0]} infile outfile usb_path")
if __name__ == "__main__":
if len(sys.argv) < 4:
usage()
exit(1)
infile = sys.argv[1]
outfile = sys.argv[2]
usb_path = sys.argv[3]
main(infile, outfile, usb_path)
|
Add python driver for uart exampleimport serial
import sys
def main(infile, outfile, usb_path):
print (f"Running with infile={infile}, outfile={outfile}")
with open(infile, "rb") as f:
data = f.read()
print(f"infile length={len(data)}")
ser = serial.Serial(usb_path, 115200)
ser.write(data)
res = ser.read(len(data))
ser.close()
with open(outfile, "wb") as f:
f.write(res)
def usage():
print(f"usage: {sys.argv[0]} infile outfile usb_path")
if __name__ == "__main__":
if len(sys.argv) < 4:
usage()
exit(1)
infile = sys.argv[1]
outfile = sys.argv[2]
usb_path = sys.argv[3]
main(infile, outfile, usb_path)
|
<commit_before><commit_msg>Add python driver for uart example<commit_after>import serial
import sys
def main(infile, outfile, usb_path):
print (f"Running with infile={infile}, outfile={outfile}")
with open(infile, "rb") as f:
data = f.read()
print(f"infile length={len(data)}")
ser = serial.Serial(usb_path, 115200)
ser.write(data)
res = ser.read(len(data))
ser.close()
with open(outfile, "wb") as f:
f.write(res)
def usage():
print(f"usage: {sys.argv[0]} infile outfile usb_path")
if __name__ == "__main__":
if len(sys.argv) < 4:
usage()
exit(1)
infile = sys.argv[1]
outfile = sys.argv[2]
usb_path = sys.argv[3]
main(infile, outfile, usb_path)
|
|
99369dc3e39a9730c6d936ced0888a1338a57c27
|
geosoft/gxpy/tests/runall.py
|
geosoft/gxpy/tests/runall.py
|
import multiprocessing
import subprocess
import glob
import timeit
def work(test):
return (test, subprocess.call(['nosetests', '-v', test]))
_exit_code = 0
def run_all_tests():
tests = glob.glob('test_*.py')
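    # Fan the test modules out to a pool of 6 workers, one nosetests process per module.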
pool = multiprocessing.Pool(processes=6)
print(pool.map(work, tests))
return 0
if __name__ == '__main__':
start_time = timeit.default_timer()
exit_code = run_all_tests()
print(timeit.default_timer() - start_time)
exit(exit_code)
|
Resolve circular imports and syntax issues to restore Python 3.4 support
|
Resolve circular imports and syntax issues to restore Python 3.4 support
|
Python
|
bsd-2-clause
|
GeosoftInc/gxpy
|
Resolve circular imports and syntax issues to restore Python 3.4 support
|
import multiprocessing
import subprocess
import glob
import timeit
def work(test):
return (test, subprocess.call(['nosetests', '-v', test]))
_exit_code = 0
def run_all_tests():
tests = glob.glob('test_*.py')
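    # Fan the test modules out to a pool of 6 workers, one nosetests process per module.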
pool = multiprocessing.Pool(processes=6)
print(pool.map(work, tests))
return 0
if __name__ == '__main__':
start_time = timeit.default_timer()
exit_code = run_all_tests()
print(timeit.default_timer() - start_time)
exit(exit_code)
|
<commit_before><commit_msg>Resolve circular imports and syntax issues to restore Python 3.4 support<commit_after>
|
import multiprocessing
import subprocess
import glob
import timeit
def work(test):
return (test, subprocess.call(['nosetests', '-v', test]))
_exit_code = 0
def run_all_tests():
tests = glob.glob('test_*.py')
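    # Fan the test modules out to a pool of 6 workers, one nosetests process per module.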
pool = multiprocessing.Pool(processes=6)
print(pool.map(work, tests))
return 0
if __name__ == '__main__':
start_time = timeit.default_timer()
exit_code = run_all_tests()
print(timeit.default_timer() - start_time)
exit(exit_code)
|
Resolve circular imports and syntax issues to restore Python 3.4 supportimport multiprocessing
import subprocess
import glob
import timeit
def work(test):
return (test, subprocess.call(['nosetests', '-v', test]))
_exit_code = 0
def run_all_tests():
tests = glob.glob('test_*.py')
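    # Fan the test modules out to a pool of 6 workers, one nosetests process per module.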
pool = multiprocessing.Pool(processes=6)
print(pool.map(work, tests))
return 0
if __name__ == '__main__':
start_time = timeit.default_timer()
exit_code = run_all_tests()
print(timeit.default_timer() - start_time)
exit(exit_code)
|
<commit_before><commit_msg>Resolve circular imports and syntax issues to restore Python 3.4 support<commit_after>import multiprocessing
import subprocess
import glob
import timeit
def work(test):
return (test, subprocess.call(['nosetests', '-v', test]))
_exit_code = 0
def run_all_tests():
tests = glob.glob('test_*.py')
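    # Fan the test modules out to a pool of 6 workers, one nosetests process per module.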
pool = multiprocessing.Pool(processes=6)
print(pool.map(work, tests))
return 0
if __name__ == '__main__':
start_time = timeit.default_timer()
exit_code = run_all_tests()
print(timeit.default_timer() - start_time)
exit(exit_code)
|
|
28dd0de65ebab3ef039a1bf1290e206205fb0dea
|
control/Control/src/balancer/initial_placement_rr.py
|
control/Control/src/balancer/initial_placement_rr.py
|
from logs import sonarlog
from workload.timeutil import * # @UnusedWildImport
import conf_domains
import conf_nodes
import initial_placement
# Setup logging
logger = sonarlog.getLogger('initial_placement')
class RRPlacement(initial_placement.InitialPlacement):
def execute(self):
# Execute super code
super(RRPlacement, self).execute()
print 'Distributing domains over all servers ...'
# Logging
logger.info('Placement strategy: Round Robin')
logger.info('Required servers: %i' % conf_nodes.NODE_COUNT)
migrations = []
assignment = {}
node_index = 0
service_index = 0
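        # Hand each domain to the next node in round-robin order, wrapping at NODE_COUNT.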
for maps in conf_domains.initial_domains:
migrations.append((maps.name, node_index))
node_index = (node_index + 1) % conf_nodes.NODE_COUNT
assignment[service_index] = node_index
service_index += 1
print 'Assignment: %s' % assignment
logger.info('Assignment: %s' % assignment)
print 'Migrations: %s' % migrations
logger.info('Migrations: %s' % migrations)
return migrations, self._count_active_servers(assignment)
|
Add rudimentary initial placement RR controller
|
Add rudimentary initial placement RR controller
|
Python
|
mit
|
jacksonicson/paper.IS2015,jacksonicson/paper.IS2015,jacksonicson/paper.IS2015,jacksonicson/paper.IS2015,jacksonicson/paper.IS2015
|
Add rudimentary initial placement RR controller
|
from logs import sonarlog
from workload.timeutil import * # @UnusedWildImport
import conf_domains
import conf_nodes
import initial_placement
# Setup logging
logger = sonarlog.getLogger('initial_placement')
class RRPlacement(initial_placement.InitialPlacement):
def execute(self):
# Execute super code
super(RRPlacement, self).execute()
print 'Distributing domains over all servers ...'
# Logging
logger.info('Placement strategy: Round Robin')
logger.info('Required servers: %i' % conf_nodes.NODE_COUNT)
migrations = []
assignment = {}
node_index = 0
service_index = 0
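        # Hand each domain to the next node in round-robin order, wrapping at NODE_COUNT.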
for maps in conf_domains.initial_domains:
migrations.append((maps.name, node_index))
node_index = (node_index + 1) % conf_nodes.NODE_COUNT
assignment[service_index] = node_index
service_index += 1
print 'Assignment: %s' % assignment
logger.info('Assignment: %s' % assignment)
print 'Migrations: %s' % migrations
logger.info('Migrations: %s' % migrations)
return migrations, self._count_active_servers(assignment)
|
<commit_before><commit_msg>Add rudimentary initial placement RR controller<commit_after>
|
from logs import sonarlog
from workload.timeutil import * # @UnusedWildImport
import conf_domains
import conf_nodes
import initial_placement
# Setup logging
logger = sonarlog.getLogger('initial_placement')
class RRPlacement(initial_placement.InitialPlacement):
def execute(self):
# Execute super code
super(RRPlacement, self).execute()
print 'Distributing domains over all servers ...'
# Logging
logger.info('Placement strategy: Round Robin')
logger.info('Required servers: %i' % conf_nodes.NODE_COUNT)
migrations = []
assignment = {}
node_index = 0
service_index = 0
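        # Hand each domain to the next node in round-robin order, wrapping at NODE_COUNT.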
for maps in conf_domains.initial_domains:
migrations.append((maps.name, node_index))
node_index = (node_index + 1) % conf_nodes.NODE_COUNT
assignment[service_index] = node_index
service_index += 1
print 'Assignment: %s' % assignment
logger.info('Assignment: %s' % assignment)
print 'Migrations: %s' % migrations
logger.info('Migrations: %s' % migrations)
return migrations, self._count_active_servers(assignment)
|
Add rudimentary initial placement RR controllerfrom logs import sonarlog
from workload.timeutil import * # @UnusedWildImport
import conf_domains
import conf_nodes
import initial_placement
# Setup logging
logger = sonarlog.getLogger('initial_placement')
class RRPlacement(initial_placement.InitialPlacement):
def execute(self):
# Execute super code
super(RRPlacement, self).execute()
print 'Distributing domains over all servers ...'
# Logging
logger.info('Placement strategy: Round Robin')
logger.info('Required servers: %i' % conf_nodes.NODE_COUNT)
migrations = []
assignment = {}
node_index = 0
service_index = 0
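        # Hand each domain to the next node in round-robin order, wrapping at NODE_COUNT.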
for maps in conf_domains.initial_domains:
migrations.append((maps.name, node_index))
node_index = (node_index + 1) % conf_nodes.NODE_COUNT
assignment[service_index] = node_index
service_index += 1
print 'Assignment: %s' % assignment
logger.info('Assignment: %s' % assignment)
print 'Migrations: %s' % migrations
logger.info('Migrations: %s' % migrations)
return migrations, self._count_active_servers(assignment)
|
<commit_before><commit_msg>Add rudimentary initial placement RR controller<commit_after>from logs import sonarlog
from workload.timeutil import * # @UnusedWildImport
import conf_domains
import conf_nodes
import initial_placement
# Setup logging
logger = sonarlog.getLogger('initial_placement')
class RRPlacement(initial_placement.InitialPlacement):
def execute(self):
# Execute super code
super(RRPlacement, self).execute()
print 'Distributing domains over all servers ...'
# Logging
logger.info('Placement strategy: Round Robin')
logger.info('Required servers: %i' % conf_nodes.NODE_COUNT)
migrations = []
assignment = {}
node_index = 0
service_index = 0
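        # Hand each domain to the next node in round-robin order, wrapping at NODE_COUNT.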
for maps in conf_domains.initial_domains:
migrations.append((maps.name, node_index))
node_index = (node_index + 1) % conf_nodes.NODE_COUNT
assignment[service_index] = node_index
service_index += 1
print 'Assignment: %s' % assignment
logger.info('Assignment: %s' % assignment)
print 'Migrations: %s' % migrations
logger.info('Migrations: %s' % migrations)
return migrations, self._count_active_servers(assignment)
|
|
ba1198c4a622aa4bb039355043a3eab5b8f3614c
|
tests/test_exporters.py
|
tests/test_exporters.py
|
# -*- coding: utf-8 -*-
"""Tests for custom exporters."""
from nbconvert.tests.base import TestsBase
from nbformat import v4, write
import io
import os
class TestNbConvertExporters(TestsBase):
def test_embedhtml(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source=""),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html'
' "notebook2"')
statinfo = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
os.remove('notebook2.html')
self.nbconvert('--to html_embed'
' "notebook2"')
statinfo_e = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
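            # The embedded export inlines external resources, so it should be strictly larger.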
assert statinfo_e.st_size > statinfo.st_size
def test_htmltoc2(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source="# Heading"),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html_toc'
' "notebook2"')
assert os.path.isfile('notebook2.html')
|
Add tests for toc2 and html_embed exporters
|
Add tests for toc2 and html_embed exporters
|
Python
|
bsd-3-clause
|
juhasch/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions
|
Add tests for toc2 and html_embed exporters
|
# -*- coding: utf-8 -*-
"""Tests for custom exporters."""
from nbconvert.tests.base import TestsBase
from nbformat import v4, write
import io
import os
class TestNbConvertExporters(TestsBase):
def test_embedhtml(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source=""),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html'
' "notebook2"')
statinfo = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
os.remove('notebook2.html')
self.nbconvert('--to html_embed'
' "notebook2"')
statinfo_e = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
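            # The embedded export inlines external resources, so it should be strictly larger.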
assert statinfo_e.st_size > statinfo.st_size
def test_htmltoc2(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source="# Heading"),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html_toc'
' "notebook2"')
assert os.path.isfile('notebook2.html')
|
<commit_before><commit_msg>Add tests for toc2 and html_embed exporters<commit_after>
|
# -*- coding: utf-8 -*-
"""Tests for custom exporters."""
from nbconvert.tests.base import TestsBase
from nbformat import v4, write
import io
import os
class TestNbConvertExporters(TestsBase):
def test_embedhtml(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source=""),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html'
' "notebook2"')
statinfo = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
os.remove('notebook2.html')
self.nbconvert('--to html_embed'
' "notebook2"')
statinfo_e = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
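            # The embedded export inlines external resources, so it should be strictly larger.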
assert statinfo_e.st_size > statinfo.st_size
def test_htmltoc2(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source="# Heading"),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html_toc'
' "notebook2"')
assert os.path.isfile('notebook2.html')
|
Add tests for toc2 and html_embed exporters# -*- coding: utf-8 -*-
"""Tests for custom exporters."""
from nbconvert.tests.base import TestsBase
from nbformat import v4, write
import io
import os
class TestNbConvertExporters(TestsBase):
def test_embedhtml(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source=""),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html'
' "notebook2"')
statinfo = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
os.remove('notebook2.html')
self.nbconvert('--to html_embed'
' "notebook2"')
statinfo_e = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
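            # The embedded export inlines external resources, so it should be strictly larger.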
assert statinfo_e.st_size > statinfo.st_size
def test_htmltoc2(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source="# Heading"),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html_toc'
' "notebook2"')
assert os.path.isfile('notebook2.html')
|
<commit_before><commit_msg>Add tests for toc2 and html_embed exporters<commit_after># -*- coding: utf-8 -*-
"""Tests for custom exporters."""
from nbconvert.tests.base import TestsBase
from nbformat import v4, write
import io
import os
class TestNbConvertExporters(TestsBase):
def test_embedhtml(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source=""),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html'
' "notebook2"')
statinfo = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
os.remove('notebook2.html')
self.nbconvert('--to html_embed'
' "notebook2"')
statinfo_e = os.stat('notebook2.html')
assert os.path.isfile('notebook2.html')
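            # The embedded export inlines external resources, so it should be strictly larger.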
assert statinfo_e.st_size > statinfo.st_size
def test_htmltoc2(self):
with self.create_temp_cwd():
nb = v4.new_notebook(cells=[
v4.new_code_cell(source="a = 'world'"),
v4.new_markdown_cell(source="# Heading"),
])
with io.open('notebook2.ipynb', 'w', encoding='utf-8') as f:
write(nb, f, 4)
self.nbconvert('--to html_toc'
' "notebook2"')
assert os.path.isfile('notebook2.html')
|
|
e91079ecab2d324ae5428239bcf792ab149faa4a
|
jacquard/storage/tests/test_dummy.py
|
jacquard/storage/tests/test_dummy.py
|
import unittest
from jacquard.storage.dummy import DummyStore
from jacquard.storage.testing_utils import StorageGauntlet
class DummyGauntletTest(StorageGauntlet, unittest.TestCase):
def open_storage(self):
return DummyStore('')
|
Cover the dummy storage engine with tests
|
Cover the dummy storage engine with tests
|
Python
|
mit
|
prophile/jacquard,prophile/jacquard
|
Cover the dummy storage engine with tests
|
import unittest
from jacquard.storage.dummy import DummyStore
from jacquard.storage.testing_utils import StorageGauntlet
class DummyGauntletTest(StorageGauntlet, unittest.TestCase):
def open_storage(self):
return DummyStore('')
|
<commit_before><commit_msg>Cover the dummy storage engine with tests<commit_after>
|
import unittest
from jacquard.storage.dummy import DummyStore
from jacquard.storage.testing_utils import StorageGauntlet
class DummyGauntletTest(StorageGauntlet, unittest.TestCase):
def open_storage(self):
return DummyStore('')
|
Cover the dummy storage engine with testsimport unittest
from jacquard.storage.dummy import DummyStore
from jacquard.storage.testing_utils import StorageGauntlet
class DummyGauntletTest(StorageGauntlet, unittest.TestCase):
def open_storage(self):
return DummyStore('')
|
<commit_before><commit_msg>Cover the dummy storage engine with tests<commit_after>import unittest
from jacquard.storage.dummy import DummyStore
from jacquard.storage.testing_utils import StorageGauntlet
class DummyGauntletTest(StorageGauntlet, unittest.TestCase):
def open_storage(self):
return DummyStore('')
|
|
ef010cd80984437e591e85d957223286b703d6c4
|
tools/convert-url-history.py
|
tools/convert-url-history.py
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
# Guard before indexing argv; otherwise a missing argument raises IndexError.
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
Add initial URL history importer
|
Add initial URL history importer
|
Python
|
apache-2.0
|
rogers0/namebench,google/namebench,protron/namebench,google/namebench,google/namebench
|
Add initial URL history importer
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
# Guard before indexing argv; otherwise a missing argument raises IndexError.
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
<commit_before><commit_msg>Add initial URL history importer<commit_after>
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
# Guard before indexing argv; otherwise a missing argument raises IndexError.
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
Add initial URL history importer#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
# Guard before indexing argv; otherwise a missing argument raises IndexError.
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
<commit_before><commit_msg>Add initial URL history importer<commit_after>#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
# Guard before indexing argv; otherwise a missing argument raises IndexError.
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
|
3f3bb40f0913e6ddd57b8c0bfa94aecb4a3b5dda
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/env python
import unittest
import argparse
import logging
def run():
parser = argparse.ArgumentParser()
parser.add_argument('--print-log', action='store_true',
help='Print the log.')
args = parser.parse_args()
if args.print_log:
logging.basicConfig(level=logging.DEBUG,
format=('%(asctime)s %(levelname)s:%(name)s:'
'%(funcName)s:'
'%(filename)s(%(lineno)d):'
'%(threadName)s(%(thread)d):%(message)s'))
unittest.main(argv=[''])
if __name__ == '__main__':
run()
|
Create initial test runner script.
|
Create initial test runner script.
|
Python
|
agpl-3.0
|
izrik/wikiware,izrik/wikiware,izrik/blogware,izrik/blogware,izrik/wikiware,izrik/blogware
|
Create initial test runner script.
|
#!/usr/bin/env python
import unittest
import argparse
import logging
def run():
parser = argparse.ArgumentParser()
parser.add_argument('--print-log', action='store_true',
help='Print the log.')
args = parser.parse_args()
if args.print_log:
logging.basicConfig(level=logging.DEBUG,
format=('%(asctime)s %(levelname)s:%(name)s:'
'%(funcName)s:'
'%(filename)s(%(lineno)d):'
'%(threadName)s(%(thread)d):%(message)s'))
unittest.main(argv=[''])
if __name__ == '__main__':
run()
|
<commit_before><commit_msg>Create initial test runner script.<commit_after>
|
#!/usr/bin/env python
import unittest
import argparse
import logging
def run():
parser = argparse.ArgumentParser()
parser.add_argument('--print-log', action='store_true',
help='Print the log.')
args = parser.parse_args()
if args.print_log:
logging.basicConfig(level=logging.DEBUG,
format=('%(asctime)s %(levelname)s:%(name)s:'
'%(funcName)s:'
'%(filename)s(%(lineno)d):'
'%(threadName)s(%(thread)d):%(message)s'))
unittest.main(argv=[''])
if __name__ == '__main__':
run()
|
Create initial test runner script.#!/usr/bin/env python
import unittest
import argparse
import logging
def run():
parser = argparse.ArgumentParser()
parser.add_argument('--print-log', action='store_true',
help='Print the log.')
args = parser.parse_args()
if args.print_log:
logging.basicConfig(level=logging.DEBUG,
format=('%(asctime)s %(levelname)s:%(name)s:'
'%(funcName)s:'
'%(filename)s(%(lineno)d):'
'%(threadName)s(%(thread)d):%(message)s'))
unittest.main(argv=[''])
if __name__ == '__main__':
run()
|
<commit_before><commit_msg>Create initial test runner script.<commit_after>#!/usr/bin/env python
import unittest
import argparse
import logging
def run():
parser = argparse.ArgumentParser()
parser.add_argument('--print-log', action='store_true',
help='Print the log.')
args = parser.parse_args()
if args.print_log:
logging.basicConfig(level=logging.DEBUG,
format=('%(asctime)s %(levelname)s:%(name)s:'
'%(funcName)s:'
'%(filename)s(%(lineno)d):'
'%(threadName)s(%(thread)d):%(message)s'))
unittest.main(argv=[''])
if __name__ == '__main__':
run()
|
|
7d24695c7e94e787b5d66854db7cc6dc1abcbf10
|
polyaxon/tracker/publish_tracker.py
|
polyaxon/tracker/publish_tracker.py
|
import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
if not self.cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
self.get_cluster_id(),
event.serialize(dumps=False),
)
self.analytics.track(
self.get_cluster_id(),
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
|
import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
cluster_id = self.get_cluster_id()
if not cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
cluster_id,
event.serialize(dumps=False),
)
self.analytics.track(
cluster_id,
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
|
Update check on cluster id
|
Update check on cluster id
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
if not self.cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
self.get_cluster_id(),
event.serialize(dumps=False),
)
self.analytics.track(
self.get_cluster_id(),
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
Update check on cluster id
|
import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
cluster_id = self.get_cluster_id()
if not cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
cluster_id,
event.serialize(dumps=False),
)
self.analytics.track(
cluster_id,
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
|
<commit_before>import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
if not self.cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
self.get_cluster_id(),
event.serialize(dumps=False),
)
self.analytics.track(
self.get_cluster_id(),
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
<commit_msg>Update check on cluster id<commit_after>
|
import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
cluster_id = self.get_cluster_id()
if not cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
cluster_id,
event.serialize(dumps=False),
)
self.analytics.track(
cluster_id,
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
|
import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
if not self.cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
self.get_cluster_id(),
event.serialize(dumps=False),
)
self.analytics.track(
self.get_cluster_id(),
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
Update check on cluster idimport analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
cluster_id = self.get_cluster_id()
if not cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
cluster_id,
event.serialize(dumps=False),
)
self.analytics.track(
cluster_id,
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
|
<commit_before>import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
if not self.cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
self.get_cluster_id(),
event.serialize(dumps=False),
)
self.analytics.track(
self.get_cluster_id(),
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
<commit_msg>Update check on cluster id<commit_after>import analytics
from django.db import InterfaceError, OperationalError, ProgrammingError
from tracker.service import TrackerService
class PublishTrackerService(TrackerService):
def __init__(self, key=''):
self.cluster_id = None
self.analytics = analytics
self.analytics.write_key = key
def get_cluster_id(self):
if self.cluster_id:
return self.cluster_id
from clusters.models import Cluster
try:
cluster_uuid = Cluster.load().uuid.hex
self.cluster_id = cluster_uuid
except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError):
pass
return self.cluster_id
def record_event(self, event):
cluster_id = self.get_cluster_id()
if not cluster_id:
return
if event.event_type == 'cluster.created':
self.analytics.identify(
cluster_id,
event.serialize(dumps=False),
)
self.analytics.track(
cluster_id,
event.event_type,
event.serialize(dumps=False),
)
def setup(self):
super(PublishTrackerService, self).setup()
self.cluster_id = self.get_cluster_id()
|
261a554647a8b220ce87155fd05acd8d1dda0914
|
test/storage/test_google_storage.py
|
test/storage/test_google_storage.py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import httplib
import unittest
from libcloud.storage.drivers.google_storage import GoogleStorageDriver
from test.storage.test_s3 import S3Tests, S3MockHttp
from test.file_fixtures import StorageFileFixtures
from test.secrets import STORAGE_GOOGLE_STORAGE_PARAMS
class GoogleStorageMockHttp(S3MockHttp):
fixtures = StorageFileFixtures('google_storage')
class GoogleStorageTests(S3Tests):
driver_type = GoogleStorageDriver
driver_args = STORAGE_GOOGLE_STORAGE_PARAMS
mock_response_klass = GoogleStorageMockHttp
def test_billing_not_enabled(self):
# TODO
pass
if __name__ == '__main__':
sys.exit(unittest.main())
|
Add Google Storage storage tests. Tests are based on the S3 ones.
|
Add Google Storage storage tests. Tests are based on the S3 ones.
git-svn-id: 9ad005ce451fa0ce30ad6352b03eb45b36893355@1179577 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
niteoweb/libcloud,cryptickp/libcloud,techhat/libcloud,Verizon/libcloud,mathspace/libcloud,samuelchong/libcloud,vongazman/libcloud,andrewsomething/libcloud,pquentin/libcloud,kater169/libcloud,JamesGuthrie/libcloud,sahildua2305/libcloud,pantheon-systems/libcloud,pquentin/libcloud,watermelo/libcloud,Keisuke69/libcloud,ZuluPro/libcloud,illfelder/libcloud,curoverse/libcloud,aleGpereira/libcloud,pantheon-systems/libcloud,Itxaka/libcloud,thesquelched/libcloud,ByteInternet/libcloud,illfelder/libcloud,erjohnso/libcloud,mbrukman/libcloud,iPlantCollaborativeOpenSource/libcloud,cloudControl/libcloud,sahildua2305/libcloud,carletes/libcloud,jimbobhickville/libcloud,niteoweb/libcloud,cloudControl/libcloud,Jc2k/libcloud,marcinzaremba/libcloud,Itxaka/libcloud,jerryblakley/libcloud,ByteInternet/libcloud,sfriesel/libcloud,dcorbacho/libcloud,MrBasset/libcloud,thesquelched/libcloud,marcinzaremba/libcloud,Kami/libcloud,briancurtin/libcloud,sergiorua/libcloud,wrigri/libcloud,apache/libcloud,Cloud-Elasticity-Services/as-libcloud,briancurtin/libcloud,StackPointCloud/libcloud,smaffulli/libcloud,MrBasset/libcloud,wrigri/libcloud,JamesGuthrie/libcloud,Scalr/libcloud,Cloud-Elasticity-Services/as-libcloud,Keisuke69/libcloud,mathspace/libcloud,Kami/libcloud,dcorbacho/libcloud,wido/libcloud,schaubl/libcloud,ClusterHQ/libcloud,mgogoulos/libcloud,thesquelched/libcloud,NexusIS/libcloud,ZuluPro/libcloud,t-tran/libcloud,mistio/libcloud,andrewsomething/libcloud,wuyuewen/libcloud,mbrukman/libcloud,watermelo/libcloud,supertom/libcloud,smaffulli/libcloud,iPlantCollaborativeOpenSource/libcloud,wido/libcloud,Verizon/libcloud,illfelder/libcloud,erjohnso/libcloud,cryptickp/libcloud,aviweit/libcloud,SecurityCompass/libcloud,SecurityCompass/libcloud,mistio/libcloud,niteoweb/libcloud,mtekel/libcloud,sgammon/libcloud,NexusIS/libcloud,andrewsomething/libcloud,apache/libcloud,aviweit/libcloud,Scalr/libcloud,mistio/libcloud,DimensionDataCBUSydney/libcloud,carletes/libcloud,carletes/libcloud,pquentin/libcloud,samuelchong/libcloud,ByteInternet/libcloud,techhat/libcloud,ninefold/libcloud,vongazman/libcloud,kater169/libcloud,jerryblakley/libcloud,Itxaka/libcloud,marcinzaremba/libcloud,mgogoulos/libcloud,watermelo/libcloud,ninefold/libcloud,iPlantCollaborativeOpenSource/libcloud,jerryblakley/libcloud,curoverse/libcloud,munkiat/libcloud,Scalr/libcloud,ClusterHQ/libcloud,sfriesel/libcloud,Kami/libcloud,jimbobhickville/libcloud,SecurityCompass/libcloud,smaffulli/libcloud,sergiorua/libcloud,wrigri/libcloud,apache/libcloud,erjohnso/libcloud,schaubl/libcloud,mathspace/libcloud,ZuluPro/libcloud,munkiat/libcloud,Verizon/libcloud,JamesGuthrie/libcloud,mbrukman/libcloud,mgogoulos/libcloud,t-tran/libcloud,t-tran/libcloud,atsaki/libcloud,mtekel/libcloud,wuyuewen/libcloud,jimbobhickville/libcloud,wuyuewen/libcloud,curoverse/libcloud,dcorbacho/libcloud,Jc2k/libcloud,StackPointCloud/libcloud,Cloud-Elasticity-Services/as-libcloud,StackPointCloud/libcloud,techhat/libcloud,aleGpereira/libcloud,sergiorua/libcloud,pantheon-systems/libcloud,vongazman/libcloud,kater169/libcloud,atsaki/libcloud,supertom/libcloud,supertom/libcloud,atsaki/libcloud,mtekel/libcloud,lochiiconnectivity/libcloud,lochiiconnectivity/libcloud,DimensionDataCBUSydney/libcloud,cloudControl/libcloud,DimensionDataCBUSydney/libcloud,NexusIS/libcloud,lochiiconnectivity/libcloud,schaubl/libcloud,sfriesel/libcloud,aviweit/libcloud,briancurtin/libcloud,munkiat/libcloud,cryptickp/libcloud,sahildua2305/libcloud,MrBasset/libcloud,samuelchong/libcloud,wido/libcloud,aleGpereira/libcloud,sgammon/libcloud
|
Add Google Storage storage tests. Tests are based on the S3 ones.
git-svn-id: 9ad005ce451fa0ce30ad6352b03eb45b36893355@1179577 13f79535-47bb-0310-9956-ffa450edef68
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import httplib
import unittest
from libcloud.storage.drivers.google_storage import GoogleStorageDriver
from test.storage.test_s3 import S3Tests, S3MockHttp
from test.file_fixtures import StorageFileFixtures
from test.secrets import STORAGE_GOOGLE_STORAGE_PARAMS
class GoogleStorageMockHttp(S3MockHttp):
fixtures = StorageFileFixtures('google_storage')
class GoogleStorageTests(S3Tests):
driver_type = GoogleStorageDriver
driver_args = STORAGE_GOOGLE_STORAGE_PARAMS
mock_response_klass = GoogleStorageMockHttp
def test_billing_not_enabled(self):
# TODO
pass
if __name__ == '__main__':
sys.exit(unittest.main())
|
<commit_before><commit_msg>Add Google Storage storage tests. Tests are based on the S3 ones.
git-svn-id: 9ad005ce451fa0ce30ad6352b03eb45b36893355@1179577 13f79535-47bb-0310-9956-ffa450edef68<commit_after>
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import httplib
import unittest
from libcloud.storage.drivers.google_storage import GoogleStorageDriver
from test.storage.test_s3 import S3Tests, S3MockHttp
from test.file_fixtures import StorageFileFixtures
from test.secrets import STORAGE_GOOGLE_STORAGE_PARAMS
class GoogleStorageMockHttp(S3MockHttp):
fixtures = StorageFileFixtures('google_storage')
class GoogleStorageTests(S3Tests):
driver_type = GoogleStorageDriver
driver_args = STORAGE_GOOGLE_STORAGE_PARAMS
mock_response_klass = GoogleStorageMockHttp
def test_billing_not_enabled(self):
# TODO
pass
if __name__ == '__main__':
sys.exit(unittest.main())
|
Add Google Storage storage tests. Tests are based on the S3 ones.
git-svn-id: 9ad005ce451fa0ce30ad6352b03eb45b36893355@1179577 13f79535-47bb-0310-9956-ffa450edef68# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import httplib
import unittest
from libcloud.storage.drivers.google_storage import GoogleStorageDriver
from test.storage.test_s3 import S3Tests, S3MockHttp
from test.file_fixtures import StorageFileFixtures
from test.secrets import STORAGE_GOOGLE_STORAGE_PARAMS
class GoogleStorageMockHttp(S3MockHttp):
fixtures = StorageFileFixtures('google_storage')
class GoogleStorageTests(S3Tests):
driver_type = GoogleStorageDriver
driver_args = STORAGE_GOOGLE_STORAGE_PARAMS
mock_response_klass = GoogleStorageMockHttp
def test_billing_not_enabled(self):
# TODO
pass
if __name__ == '__main__':
sys.exit(unittest.main())
|
<commit_before><commit_msg>Add Google Storage storage tests. Tests are based on the S3 ones.
git-svn-id: 9ad005ce451fa0ce30ad6352b03eb45b36893355@1179577 13f79535-47bb-0310-9956-ffa450edef68<commit_after># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import httplib
import unittest
from libcloud.storage.drivers.google_storage import GoogleStorageDriver
from test.storage.test_s3 import S3Tests, S3MockHttp
from test.file_fixtures import StorageFileFixtures
from test.secrets import STORAGE_GOOGLE_STORAGE_PARAMS
class GoogleStorageMockHttp(S3MockHttp):
fixtures = StorageFileFixtures('google_storage')
class GoogleStorageTests(S3Tests):
driver_type = GoogleStorageDriver
driver_args = STORAGE_GOOGLE_STORAGE_PARAMS
mock_response_klass = GoogleStorageMockHttp
def test_billing_not_enabled(self):
# TODO
pass
if __name__ == '__main__':
sys.exit(unittest.main())
|
|
44b068dbfee594babe72462476ddb9b94c17bb16
|
tests/test_spawner/test_env_vars.py
|
tests/test_spawner/test_env_vars.py
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from unittest.mock import MagicMock
from scheduler.spawners.templates.env_vars import get_resources_env_vars
class TestEnvVars(TestCase):
def test_get_resources_env_vars(self):
vars = get_resources_env_vars(None)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu = None
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu.limits = '0'
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
|
Add unit test for function get_resources_env_vars
|
Add unit test for function get_resources_env_vars
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
Add unit test for function get_resources_env_vars
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from unittest.mock import MagicMock
from scheduler.spawners.templates.env_vars import get_resources_env_vars
class TestEnvVars(TestCase):
def test_get_resources_env_vars(self):
vars = get_resources_env_vars(None)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu = None
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu.limits = '0'
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
|
<commit_before><commit_msg>Add unit test for function get_resources_env_vars<commit_after>
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from unittest.mock import MagicMock
from scheduler.spawners.templates.env_vars import get_resources_env_vars
class TestEnvVars(TestCase):
def test_get_resources_env_vars(self):
vars = get_resources_env_vars(None)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu = None
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu.limits = '0'
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
|
Add unit test for function get_resources_env_vars# -*- coding: utf-8 -*-
from unittest import TestCase
from unittest.mock import MagicMock
from scheduler.spawners.templates.env_vars import get_resources_env_vars
class TestEnvVars(TestCase):
def test_get_resources_env_vars(self):
vars = get_resources_env_vars(None)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu = None
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu.limits = '0'
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
|
<commit_before><commit_msg>Add unit test for function get_resources_env_vars<commit_after># -*- coding: utf-8 -*-
from unittest import TestCase
from unittest.mock import MagicMock
from scheduler.spawners.templates.env_vars import get_resources_env_vars
class TestEnvVars(TestCase):
def test_get_resources_env_vars(self):
vars = get_resources_env_vars(None)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu = None
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
resources = MagicMock()
resources.gpu.limits = '0'
vars = get_resources_env_vars(resources)
assert len(vars)
assert vars[0].name == 'NVIDIA_VISIBLE_DEVICES'
assert vars[0].value == 'none'
|
|
75740fe37174230faa7d9a434c844d4aa88e4091
|
python/ImagePlot.py
|
python/ImagePlot.py
|
from PIL import Image, ImageDraw
import sys
print(sys.argv)
if len(sys.argv) < 3:
    print("usage: ImagePlot.py image_file points_file")
    exit()
im = Image.open(sys.argv[1])
draw = ImageDraw.Draw(im)
# The with-statement closes the file by itself, so no try/finally is needed
# (the old finally clause raised NameError when open() failed before fp was bound).
with open(sys.argv[2]) as fp:
    lines = fp.readlines()
for line in lines:
    xy = [x.strip() for x in line.split(',')]
    print(xy)
    # Mark each point with a small filled circle (3 px radius).
    draw.ellipse([(float(xy[0]) - 3, float(xy[1]) - 3), (float(xy[0]) + 3, float(xy[1]) + 3)], fill='red')
del draw
im.save("output.png")
|
Add python script to draw points on an image
|
Add python script to draw points on an image
|
Python
|
mit
|
NaturalIntelligence/imglab,NaturalIntelligence/imglab
|
Add python script to draw points on an image
|
from PIL import Image, ImageDraw
import sys

print(sys.argv)
if len(sys.argv) < 3:
    print("Usage: ImagePlot.py <image-file> <points-file>")
    sys.exit(1)

im = Image.open(sys.argv[1])
draw = ImageDraw.Draw(im)
# the points file holds one "x,y" pair per line
with open(sys.argv[2]) as fp:
    for line in fp:
        xy = [x.strip() for x in line.split(',')]
        print(xy)
        # draw a small filled circle centred on each point
        draw.ellipse([(float(xy[0]) - 3, float(xy[1]) - 3),
                      (float(xy[0]) + 3, float(xy[1]) + 3)], fill='red')
del draw
im.save("output.png")
|
<commit_before><commit_msg>Add python script to draw points on an image<commit_after>
|
from PIL import Image, ImageDraw
import sys

print(sys.argv)
if len(sys.argv) < 3:
    print("Usage: ImagePlot.py <image-file> <points-file>")
    sys.exit(1)

im = Image.open(sys.argv[1])
draw = ImageDraw.Draw(im)
# the points file holds one "x,y" pair per line
with open(sys.argv[2]) as fp:
    for line in fp:
        xy = [x.strip() for x in line.split(',')]
        print(xy)
        # draw a small filled circle centred on each point
        draw.ellipse([(float(xy[0]) - 3, float(xy[1]) - 3),
                      (float(xy[0]) + 3, float(xy[1]) + 3)], fill='red')
del draw
im.save("output.png")
|
Add python script to draw points on an imagefrom PIL import Image, ImageDraw
import sys

print(sys.argv)
if len(sys.argv) < 3:
    print("Usage: ImagePlot.py <image-file> <points-file>")
    sys.exit(1)

im = Image.open(sys.argv[1])
draw = ImageDraw.Draw(im)
# the points file holds one "x,y" pair per line
with open(sys.argv[2]) as fp:
    for line in fp:
        xy = [x.strip() for x in line.split(',')]
        print(xy)
        # draw a small filled circle centred on each point
        draw.ellipse([(float(xy[0]) - 3, float(xy[1]) - 3),
                      (float(xy[0]) + 3, float(xy[1]) + 3)], fill='red')
del draw
im.save("output.png")
|
<commit_before><commit_msg>Add python script to draw points on an image<commit_after>from PIL import Image, ImageDraw
import sys

print(sys.argv)
if len(sys.argv) < 3:
    print("Usage: ImagePlot.py <image-file> <points-file>")
    sys.exit(1)

im = Image.open(sys.argv[1])
draw = ImageDraw.Draw(im)
# the points file holds one "x,y" pair per line
with open(sys.argv[2]) as fp:
    for line in fp:
        xy = [x.strip() for x in line.split(',')]
        print(xy)
        # draw a small filled circle centred on each point
        draw.ellipse([(float(xy[0]) - 3, float(xy[1]) - 3),
                      (float(xy[0]) + 3, float(xy[1]) + 3)], fill='red')
del draw
im.save("output.png")
|
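To try the script above, a small helper can generate a sample points file; the file names below are placeholders:
# make_points.py -- write a tiny "x,y"-per-line sample file for ImagePlot.py
with open('points.csv', 'w') as out:
    for x, y in [(10, 10), (50, 80), (120, 40)]:
        out.write('{},{}\n'.format(x, y))
# then: python ImagePlot.py input.png points.csv   (writes output.png)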
|
21cb0df67afeb3a02ad44290f8100ed875c02e5b
|
scripts/echo_usb.py
|
scripts/echo_usb.py
|
#!/usr/bin/env python
import serial
import os
import sys
import time
# Open a serial port
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
tries = 0
while True:
try:
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
break
except Exception as e:
        print 'Failed to connect to device, waiting 3 seconds then trying again'
        print e
        tries += 1
        if tries >= 10:
            print 'Failed to connect to device 10 times, exiting now'
            sys.exit()
time.sleep(3)
# Send data to start USB OTG
print 'Starting echo'
ser.write("start")
# Echo everything the device sends to stdout (runs until interrupted)
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
            except Exception:
print '\nFailed to read'
sys.stdout.write(new)
finally:
# Close the serial port
ser.close()
|
Add a script to spit usb to terminal
|
SCRIPTS: Add a script to spit usb to terminal
|
Python
|
mit
|
fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project,fnivek/eecs567-final-project
|
SCRIPTS: Add a script to spit usb to terminal
|
#!/usr/bin/env python
import serial
import os
import sys
import time
# Open a serial port
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
tries = 0
while True:
try:
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
break
except Exception as e:
        print 'Failed to connect to device, waiting 3 seconds then trying again'
        print e
        tries += 1
        if tries >= 10:
            print 'Failed to connect to device 10 times, exiting now'
            sys.exit()
time.sleep(3)
# Send data to start USB OTG
print 'Starting echo'
ser.write("start")
# Echo everything the device sends to stdout (runs until interrupted)
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
            except Exception:
print '\nFailed to read'
sys.stdout.write(new)
finally:
# Close the serial port
ser.close()
|
<commit_before><commit_msg>SCRIPTS: Add a script to spit usb to terminal<commit_after>
|
#!/usr/bin/env python
import serial
import os
import sys
import time
# Open a serial port
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
tries = 0
while True:
try:
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
break
except Exception as e:
        print 'Failed to connect to device, waiting 3 seconds then trying again'
        print e
        tries += 1
        if tries >= 10:
            print 'Failed to connect to device 10 times, exiting now'
            sys.exit()
time.sleep(3)
# Send data to start USB OTG
print 'Starting echo'
ser.write("start")
# Echo everything the device sends to stdout (runs until interrupted)
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
            except Exception:
print '\nFailed to read'
sys.stdout.write(new)
finally:
# Close the serial port
ser.close()
|
SCRIPTS: Add a script to spit usb to terminal#!/usr/bin/env python
import serial
import os
import sys
import time
# Open a serial port
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
tries = 0
while True:
try:
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
break
except Exception as e:
        print 'Failed to connect to device, waiting 3 seconds then trying again'
        print e
        tries += 1
        if tries >= 10:
            print 'Failed to connect to device 10 times, exiting now'
            sys.exit()
time.sleep(3)
# Send data to start USB OTG
print 'Starting echo'
ser.write("start")
# Echo everything the device sends to stdout (runs until interrupted)
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
            except Exception:
print '\nFailed to read'
sys.stdout.write(new)
finally:
# Close the serial port
ser.close()
|
<commit_before><commit_msg>SCRIPTS: Add a script to spit usb to terminal<commit_after>#!/usr/bin/env python
import serial
import os
import sys
import time
# Open a serial port
print 'Connecting to /dev/serial/by-id/usb-eecs567_final_project-if00'
tries = 0
while True:
try:
ser = serial.Serial("/dev/serial/by-id/usb-eecs567_final_project-if00", 115200)
break
except Exception as e:
        print 'Failed to connect to device, waiting 3 seconds then trying again'
        print e
        tries += 1
        if tries >= 10:
            print 'Failed to connect to device 10 times, exiting now'
            sys.exit()
time.sleep(3)
# Send data to start USB OTG
print 'Starting echo'
ser.write("start")
# Echo everything the device sends to stdout (runs until interrupted)
try:
while True:
num_chars = ser.inWaiting()
if num_chars:
new = ''
try:
new = ser.read(num_chars)
            except Exception:
print '\nFailed to read'
sys.stdout.write(new)
finally:
# Close the serial port
ser.close()
|
|
c5747c2cf7d59b9905e2bedddbc9cf791c300b05
|
scripts/create_initial_admin_user.py
|
scripts/create_initial_admin_user.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Create an initial user with admin privileges to begin BYCEPS setup.
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.authentication.password import service as password_service
from byceps.services.user import service as user_service
from bootstrap.util import app_context, get_config_name_from_env
@click.command()
@click.option('--screen_name', prompt=True)
@click.option('--email_address', prompt=True)
@click.option('--password', prompt=True, hide_input=True)
def execute(screen_name, email_address, password):
click.echo('Creating user "{}" ... '.format(screen_name), nl=False)
user = _create_user(screen_name, email_address)
password_service.create_password_hash(user.id, password)
click.secho('done.', fg='green')
def _create_user(screen_name, email_address):
try:
user = user_service.build_user(screen_name, email_address)
except ValueError as e:
raise click.UsageError(e)
user.enabled = True
db.session.add(user)
db.session.commit()
return user
if __name__ == '__main__':
config_name = get_config_name_from_env()
with app_context(config_name):
execute()
|
Add script to create an activated user from the command line
|
Add script to create an activated user from the command line
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps
|
Add script to create an activated user from the command line
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Create an initial user with admin privileges to begin BYCEPS setup.
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.authentication.password import service as password_service
from byceps.services.user import service as user_service
from bootstrap.util import app_context, get_config_name_from_env
@click.command()
@click.option('--screen_name', prompt=True)
@click.option('--email_address', prompt=True)
@click.option('--password', prompt=True, hide_input=True)
def execute(screen_name, email_address, password):
click.echo('Creating user "{}" ... '.format(screen_name), nl=False)
user = _create_user(screen_name, email_address)
password_service.create_password_hash(user.id, password)
click.secho('done.', fg='green')
def _create_user(screen_name, email_address):
try:
user = user_service.build_user(screen_name, email_address)
except ValueError as e:
raise click.UsageError(e)
user.enabled = True
db.session.add(user)
db.session.commit()
return user
if __name__ == '__main__':
config_name = get_config_name_from_env()
with app_context(config_name):
execute()
|
<commit_before><commit_msg>Add script to create an activated user from the command line<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Create an initial user with admin privileges to begin BYCEPS setup.
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.authentication.password import service as password_service
from byceps.services.user import service as user_service
from bootstrap.util import app_context, get_config_name_from_env
@click.command()
@click.option('--screen_name', prompt=True)
@click.option('--email_address', prompt=True)
@click.option('--password', prompt=True, hide_input=True)
def execute(screen_name, email_address, password):
click.echo('Creating user "{}" ... '.format(screen_name), nl=False)
user = _create_user(screen_name, email_address)
password_service.create_password_hash(user.id, password)
click.secho('done.', fg='green')
def _create_user(screen_name, email_address):
try:
user = user_service.build_user(screen_name, email_address)
except ValueError as e:
raise click.UsageError(e)
user.enabled = True
db.session.add(user)
db.session.commit()
return user
if __name__ == '__main__':
config_name = get_config_name_from_env()
with app_context(config_name):
execute()
|
Add script to create an activated user from the command line#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Create an initial user with admin privileges to begin BYCEPS setup.
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.authentication.password import service as password_service
from byceps.services.user import service as user_service
from bootstrap.util import app_context, get_config_name_from_env
@click.command()
@click.option('--screen_name', prompt=True)
@click.option('--email_address', prompt=True)
@click.option('--password', prompt=True, hide_input=True)
def execute(screen_name, email_address, password):
click.echo('Creating user "{}" ... '.format(screen_name), nl=False)
user = _create_user(screen_name, email_address)
password_service.create_password_hash(user.id, password)
click.secho('done.', fg='green')
def _create_user(screen_name, email_address):
try:
user = user_service.build_user(screen_name, email_address)
except ValueError as e:
raise click.UsageError(e)
user.enabled = True
db.session.add(user)
db.session.commit()
return user
if __name__ == '__main__':
config_name = get_config_name_from_env()
with app_context(config_name):
execute()
|
<commit_before><commit_msg>Add script to create an activated user from the command line<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Create an initial user with admin privileges to begin BYCEPS setup.
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.authentication.password import service as password_service
from byceps.services.user import service as user_service
from bootstrap.util import app_context, get_config_name_from_env
@click.command()
@click.option('--screen_name', prompt=True)
@click.option('--email_address', prompt=True)
@click.option('--password', prompt=True, hide_input=True)
def execute(screen_name, email_address, password):
click.echo('Creating user "{}" ... '.format(screen_name), nl=False)
user = _create_user(screen_name, email_address)
password_service.create_password_hash(user.id, password)
click.secho('done.', fg='green')
def _create_user(screen_name, email_address):
try:
user = user_service.build_user(screen_name, email_address)
except ValueError as e:
raise click.UsageError(e)
user.enabled = True
db.session.add(user)
db.session.commit()
return user
if __name__ == '__main__':
config_name = get_config_name_from_env()
with app_context(config_name):
execute()
|
|
3fab900ad5d31941ec0f05d9a10e9f9a63bf8853
|
MComponents/MTestComponent.py
|
MComponents/MTestComponent.py
|
__author__ = 'Samvid Mistry'
from PySide.QtCore import *
from PySide.QtGui import *
from MAnimations.MCircularReveal import MCircularReveal
from MComponents.MShape import MShape
from MUtilities import MColors
class MTestComponent(MShape):
def __init__(self):
MShape.__init__(self)
self.max_width = 100
self.max_height = 100
self.width = 100
self.height = 100
self.__color = MColors.PRIMARY_COLOR
self.__pen = QPen(self.__color, 0)
self.__painter = QPainter()
self.__reveal = MCircularReveal()
self.__reveal.duration = 100
self.__reveal.add_target(self)
self.__bounding_rect = QRect(10, 15, self.width, self.height)
def paintEvent(self, event):
self.__painter.begin(self)
self.__painter.setRenderHint(QPainter.Antialiasing)
self.__painter.setPen(self.__pen)
self.__painter.setBrush(self.__color)
if self.clip is not None:
self.__painter.setClipPath(self.clip)
self.__painter.drawRect(self.__bounding_rect)
self.__painter.end()
def mousePressEvent(self, event):
self.__reveal.start()
|
Test component for running animation tests
|
Test component for running animation tests
|
Python
|
mit
|
GelaniNijraj/PyMaterial,samvidmistry/PyMaterial
|
Test component for running animation tests
|
__author__ = 'Samvid Mistry'
from PySide.QtCore import *
from PySide.QtGui import *
from MAnimations.MCircularReveal import MCircularReveal
from MComponents.MShape import MShape
from MUtilities import MColors
class MTestComponent(MShape):
def __init__(self):
MShape.__init__(self)
self.max_width = 100
self.max_height = 100
self.width = 100
self.height = 100
self.__color = MColors.PRIMARY_COLOR
self.__pen = QPen(self.__color, 0)
self.__painter = QPainter()
self.__reveal = MCircularReveal()
self.__reveal.duration = 100
self.__reveal.add_target(self)
self.__bounding_rect = QRect(10, 15, self.width, self.height)
def paintEvent(self, event):
self.__painter.begin(self)
self.__painter.setRenderHint(QPainter.Antialiasing)
self.__painter.setPen(self.__pen)
self.__painter.setBrush(self.__color)
if self.clip is not None:
self.__painter.setClipPath(self.clip)
self.__painter.drawRect(self.__bounding_rect)
self.__painter.end()
def mousePressEvent(self, event):
self.__reveal.start()
|
<commit_before><commit_msg>Test component for running animation tests<commit_after>
|
__author__ = 'Samvid Mistry'
from PySide.QtCore import *
from PySide.QtGui import *
from MAnimations.MCircularReveal import MCircularReveal
from MComponents.MShape import MShape
from MUtilities import MColors
class MTestComponent(MShape):
def __init__(self):
MShape.__init__(self)
self.max_width = 100
self.max_height = 100
self.width = 100
self.height = 100
self.__color = MColors.PRIMARY_COLOR
self.__pen = QPen(self.__color, 0)
self.__painter = QPainter()
self.__reveal = MCircularReveal()
self.__reveal.duration = 100
self.__reveal.add_target(self)
self.__bounding_rect = QRect(10, 15, self.width, self.height)
def paintEvent(self, event):
self.__painter.begin(self)
self.__painter.setRenderHint(QPainter.Antialiasing)
self.__painter.setPen(self.__pen)
self.__painter.setBrush(self.__color)
if self.clip is not None:
self.__painter.setClipPath(self.clip)
self.__painter.drawRect(self.__bounding_rect)
self.__painter.end()
def mousePressEvent(self, event):
self.__reveal.start()
|
Test component for running animation tests__author__ = 'Samvid Mistry'
from PySide.QtCore import *
from PySide.QtGui import *
from MAnimations.MCircularReveal import MCircularReveal
from MComponents.MShape import MShape
from MUtilities import MColors
class MTestComponent(MShape):
def __init__(self):
MShape.__init__(self)
self.max_width = 100
self.max_height = 100
self.width = 100
self.height = 100
self.__color = MColors.PRIMARY_COLOR
self.__pen = QPen(self.__color, 0)
self.__painter = QPainter()
self.__reveal = MCircularReveal()
self.__reveal.duration = 100
self.__reveal.add_target(self)
self.__bounding_rect = QRect(10, 15, self.width, self.height)
def paintEvent(self, event):
self.__painter.begin(self)
self.__painter.setRenderHint(QPainter.Antialiasing)
self.__painter.setPen(self.__pen)
self.__painter.setBrush(self.__color)
if self.clip is not None:
self.__painter.setClipPath(self.clip)
self.__painter.drawRect(self.__bounding_rect)
self.__painter.end()
def mousePressEvent(self, event):
self.__reveal.start()
|
<commit_before><commit_msg>Test component for running animation tests<commit_after>__author__ = 'Samvid Mistry'
from PySide.QtCore import *
from PySide.QtGui import *
from MAnimations.MCircularReveal import MCircularReveal
from MComponents.MShape import MShape
from MUtilities import MColors
class MTestComponent(MShape):
def __init__(self):
MShape.__init__(self)
self.max_width = 100
self.max_height = 100
self.width = 100
self.height = 100
self.__color = MColors.PRIMARY_COLOR
self.__pen = QPen(self.__color, 0)
self.__painter = QPainter()
self.__reveal = MCircularReveal()
self.__reveal.duration = 100
self.__reveal.add_target(self)
self.__bounding_rect = QRect(10, 15, self.width, self.height)
def paintEvent(self, event):
self.__painter.begin(self)
self.__painter.setRenderHint(QPainter.Antialiasing)
self.__painter.setPen(self.__pen)
self.__painter.setBrush(self.__color)
if self.clip is not None:
self.__painter.setClipPath(self.clip)
self.__painter.drawRect(self.__bounding_rect)
self.__painter.end()
def mousePressEvent(self, event):
self.__reveal.start()
|
|
76254f7f76915d4f21ec8406313e998ddbafffe2
|
app/svm_prediction.py
|
app/svm_prediction.py
|
#!/usr/bin/env python
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
import validate_dataset as validate
# Make predictions using Support Vector Machines
svm = SVC()
# load train data from validate
svm.fit(validate.X_train, validate.Y_train)
# get predictions and registers from validation
predictions = svm.predict(validate.X_validation)
registers = validate.Y_validation
if __name__ == '__main__':
    # accuracy score of 0.9333 equals 93.33%
print(accuracy_score(registers, predictions))
# matrix with 3 errors made
print(confusion_matrix(registers, predictions))
# classification table (precision, recall, f1-score, support)
print(classification_report(registers, predictions))
|
Add prediction from Support Vector Machines.
|
Add prediction from Support Vector Machines.
|
Python
|
mit
|
lucasb/iris-machine-learning
|
Add prediction from Support Vector Machines.
|
#!/usr/bin/env python
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
import validate_dataset as validate
# Make predictions using Support Vector Machines
svm = SVC()
# load train data from validate
svm.fit(validate.X_train, validate.Y_train)
# get predictions and registers from validation
predictions = svm.predict(validate.X_validation)
registers = validate.Y_validation
if __name__ == '__main__':
    # accuracy score of 0.9333 equals 93.33%
print(accuracy_score(registers, predictions))
# matrix with 3 errors made
print(confusion_matrix(registers, predictions))
# classification table (precision, recall, f1-score, support)
print(classification_report(registers, predictions))
|
<commit_before><commit_msg>Add prediction from Support Vector Machines.<commit_after>
|
#!/usr/bin/env python
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
import validate_dataset as validate
# Make predictions using Support Vector Machines
svm = SVC()
# load train data from validate
svm.fit(validate.X_train, validate.Y_train)
# get predictions and registers from validation
predictions = svm.predict(validate.X_validation)
registers = validate.Y_validation
if __name__ == '__main__':
    # accuracy score of 0.9333 equals 93.33%
print(accuracy_score(registers, predictions))
# matrix with 3 errors made
print(confusion_matrix(registers, predictions))
# classification table (precision, recall, f1-score, support)
print(classification_report(registers, predictions))
|
Add prediction from Support Vector Machines.#!/usr/bin/env python
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
import validate_dataset as validate
# Make predictions using Support Vector Machines
svm = SVC()
# load train data from validate
svm.fit(validate.X_train, validate.Y_train)
# get predictions and registers from validation
predictions = svm.predict(validate.X_validation)
registers = validate.Y_validation
if __name__ == '__main__':
    # accuracy score of 0.9333 equals 93.33%
print(accuracy_score(registers, predictions))
# matrix with 3 errors made
print(confusion_matrix(registers, predictions))
# classification table (precision, recall, f1-score, support)
print(classification_report(registers, predictions))
|
<commit_before><commit_msg>Add prediction from Support Vector Machines.<commit_after>#!/usr/bin/env python
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
import validate_dataset as validate
# Make predictions using Support Vector Machines
svm = SVC()
# load train data from validate
svm.fit(validate.X_train, validate.Y_train)
# get predictions and registers from validation
predictions = svm.predict(validate.X_validation)
registers = validate.Y_validation
if __name__ == '__main__':
    # accuracy score of 0.9333 equals 93.33%
print(accuracy_score(registers, predictions))
# matrix with 3 errors made
print(confusion_matrix(registers, predictions))
# classification table (precision, recall, f1-score, support)
print(classification_report(registers, predictions))
|
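The snippet above depends on a local validate_dataset module; a self-contained sketch of the same workflow on scikit-learn's bundled iris data (the split size and random_state are illustrative):
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix

# load data and hold out a validation split
X, y = load_iris(return_X_y=True)
X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.2, random_state=7)

svm = SVC()
svm.fit(X_train, y_train)
predictions = svm.predict(X_val)

print(accuracy_score(y_val, predictions))
print(confusion_matrix(y_val, predictions))
print(classification_report(y_val, predictions))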
|
e931fbb53557efc5be63015e1ce0055efef965c4
|
aids/strings/is_palindrome.py
|
aids/strings/is_palindrome.py
|
'''
In this module, we determine if a given string is a palindrome
'''
def is_palindrome(string):
'''
Return True if given string is a palindrome
'''
if len(string) < 2:
return True
if string[0] == string[-1]:
return is_palindrome(string[1:-1])
return False
|
Add function to determine if string is a palindrome
|
Add function to determine if string is a palindrome
|
Python
|
mit
|
ueg1990/aids
|
Add function to determine if string is a palindrome
|
'''
In this module, we determine if a given string is a palindrome
'''
def is_palindrome(string):
'''
Return True if given string is a palindrome
'''
if len(string) < 2:
return True
if string[0] == string[-1]:
return is_palindrome(string[1:-1])
return False
|
<commit_before><commit_msg>Add function to determine if string is a palindrome<commit_after>
|
'''
In this module, we determine if a given string is a palindrome
'''
def is_palindrome(string):
'''
Return True if given string is a palindrome
'''
if len(string) < 2:
return True
if string[0] == string[-1]:
return is_palindrome(string[1:-1])
return False
|
Add function to determine if string is a palindrome'''
In this module, we determine if a given string is a palindrome
'''
def is_palindrome(string):
'''
Return True if given string is a palindrome
'''
if len(string) < 2:
return True
if string[0] == string[-1]:
return is_palindrome(string[1:-1])
return False
|
<commit_before><commit_msg>Add function to determine if string is a palindrome<commit_after>'''
In this module, we determine if a given string is a palindrome
'''
def is_palindrome(string):
'''
Return True if given string is a palindrome
'''
if len(string) < 2:
return True
if string[0] == string[-1]:
return is_palindrome(string[1:-1])
return False
|
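Quick usage of the function above, plus an equivalent non-recursive check (the slicing recursion builds a new string on every call, so it is quadratic and recursion-depth-limited for long inputs):
print(is_palindrome('racecar'))  # True
print(is_palindrome('abca'))     # False

def is_palindrome_iterative(string):
    # one reversed copy instead of one slice per recursive call
    return string == string[::-1]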
|
0fd8d6248878db9f6ca7e945f8155f5f6cdc031d
|
gui/ninja_plugin/ninja_nysa/nysa_plugin/editor/fpga_designer/errors.py
|
gui/ninja_plugin/ninja_nysa/nysa_plugin/editor/fpga_designer/errors.py
|
class FPGADesignerError(Exception):
"""
Errors associated with the FPGA designer
    Errors associated with:
-loading the configuration file
-generating configuration files
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
|
Put the Exceptions in a separate file
|
Put the Exceptions in a separate file
|
Python
|
mit
|
CospanDesign/nysa,CospanDesign/nysa
|
Put the Exceptions in a separate file
|
class FPGADesignerError(Exception):
"""
Errors associated with the FPGA designer
    Errors associated with:
-loading the configuration file
-generating configuration files
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
|
<commit_before><commit_msg>Put the Exceptions in a separate file<commit_after>
|
class FPGADesignerError(Exception):
"""
Errors associated with the FPGA designer
    Errors associated with:
-loading the configuration file
-generating configuration files
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
|
Put the Exceptions in a separate fileclass FPGADesignerError(Exception):
"""
Errors associated with the FPGA designer
    Errors associated with:
-loading the configuration file
-generating configuration files
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
|
<commit_before><commit_msg>Put the Exceptions in a separate file<commit_after>class FPGADesignerError(Exception):
"""
Errors associated with the FPGA designer
    Errors associated with:
-loading the configuration file
-generating configuration files
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
|
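Typical usage implied by the class above; the message text is illustrative:
try:
    raise FPGADesignerError("could not load configuration file 'design.json'")
except FPGADesignerError as err:
    print(err)  # __str__ returns the repr of the stored value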
|
6937eb931e3577dac57cc36c69c755be5d71d654
|
Testing/test_Misc.py
|
Testing/test_Misc.py
|
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
Add test to check for bug that led to change in Network_mod.updateAssignedVariables
|
Add test to check for bug that led to change in Network_mod.updateAssignedVariables
|
Python
|
bsd-3-clause
|
GutenkunstLab/SloppyCell,GutenkunstLab/SloppyCell
|
Add test to check for bug that led to change in Network_mod.updateAssignedVariables
|
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test to check for bug that led to change in Network_mod.updateAssignedVariables<commit_after>
|
import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
Add test to check for bug that led to change in Network_mod.updateAssignedVariablesimport unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test to check for bug that led to change in Network_mod.updateAssignedVariables<commit_after>import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
|
3efee0a55103114548013fd7111bf5d9dd0c0bdd
|
tests/test_others.py
|
tests/test_others.py
|
from typing import Optional
import typer
from typer.main import solve_typer_info_defaults, solve_typer_info_help
from typer.models import TyperInfo
from typer.testing import CliRunner
runner = CliRunner()
def test_optional():
app = typer.Typer()
@app.command()
def opt(user: Optional[str] = None):
if user:
typer.echo(f"User: {user}")
else:
typer.echo("No user")
result = runner.invoke(app)
assert result.exit_code == 0
assert "No user" in result.output
result = runner.invoke(app, ["--user", "Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_no_type():
app = typer.Typer()
@app.command()
def no_type(user):
typer.echo(f"User: {user}")
result = runner.invoke(app, ["Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_help_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_help(TyperInfo())
assert value is None
def test_defaults_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_defaults(TyperInfo())
assert value
|
Add 4 extra tests for edge cases
|
:white_check_mark: Add 4 extra tests for edge cases
|
Python
|
mit
|
tiangolo/typer,tiangolo/typer
|
:white_check_mark: Add 4 extra tests for edge cases
|
from typing import Optional
import typer
from typer.main import solve_typer_info_defaults, solve_typer_info_help
from typer.models import TyperInfo
from typer.testing import CliRunner
runner = CliRunner()
def test_optional():
app = typer.Typer()
@app.command()
def opt(user: Optional[str] = None):
if user:
typer.echo(f"User: {user}")
else:
typer.echo("No user")
result = runner.invoke(app)
assert result.exit_code == 0
assert "No user" in result.output
result = runner.invoke(app, ["--user", "Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_no_type():
app = typer.Typer()
@app.command()
def no_type(user):
typer.echo(f"User: {user}")
result = runner.invoke(app, ["Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_help_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_help(TyperInfo())
assert value is None
def test_defaults_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_defaults(TyperInfo())
assert value
|
<commit_before><commit_msg>:white_check_mark: Add 4 extra tests for edge cases<commit_after>
|
from typing import Optional
import typer
from typer.main import solve_typer_info_defaults, solve_typer_info_help
from typer.models import TyperInfo
from typer.testing import CliRunner
runner = CliRunner()
def test_optional():
app = typer.Typer()
@app.command()
def opt(user: Optional[str] = None):
if user:
typer.echo(f"User: {user}")
else:
typer.echo("No user")
result = runner.invoke(app)
assert result.exit_code == 0
assert "No user" in result.output
result = runner.invoke(app, ["--user", "Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_no_type():
app = typer.Typer()
@app.command()
def no_type(user):
typer.echo(f"User: {user}")
result = runner.invoke(app, ["Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_help_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_help(TyperInfo())
assert value is None
def test_defaults_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_defaults(TyperInfo())
assert value
|
:white_check_mark: Add 4 extra tests for edge casesfrom typing import Optional
import typer
from typer.main import solve_typer_info_defaults, solve_typer_info_help
from typer.models import TyperInfo
from typer.testing import CliRunner
runner = CliRunner()
def test_optional():
app = typer.Typer()
@app.command()
def opt(user: Optional[str] = None):
if user:
typer.echo(f"User: {user}")
else:
typer.echo("No user")
result = runner.invoke(app)
assert result.exit_code == 0
assert "No user" in result.output
result = runner.invoke(app, ["--user", "Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_no_type():
app = typer.Typer()
@app.command()
def no_type(user):
typer.echo(f"User: {user}")
result = runner.invoke(app, ["Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_help_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_help(TyperInfo())
assert value is None
def test_defaults_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_defaults(TyperInfo())
assert value
|
<commit_before><commit_msg>:white_check_mark: Add 4 extra tests for edge cases<commit_after>from typing import Optional
import typer
from typer.main import solve_typer_info_defaults, solve_typer_info_help
from typer.models import TyperInfo
from typer.testing import CliRunner
runner = CliRunner()
def test_optional():
app = typer.Typer()
@app.command()
def opt(user: Optional[str] = None):
if user:
typer.echo(f"User: {user}")
else:
typer.echo("No user")
result = runner.invoke(app)
assert result.exit_code == 0
assert "No user" in result.output
result = runner.invoke(app, ["--user", "Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_no_type():
app = typer.Typer()
@app.command()
def no_type(user):
typer.echo(f"User: {user}")
result = runner.invoke(app, ["Camila"])
assert result.exit_code == 0
assert "User: Camila" in result.output
def test_help_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_help(TyperInfo())
assert value is None
def test_defaults_from_info():
# Mainly for coverage/completeness
value = solve_typer_info_defaults(TyperInfo())
assert value
|
|
88f6b30250c4385e0ff8492381e0ba1e75fe0c71
|
test/test_logger.py
|
test/test_logger.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
import logbook
import pytest
from subprocrunner import (
set_logger,
set_log_level,
)
class Test_set_logger(object):
@pytest.mark.parametrize(["value"], [
[True],
[False],
])
def test_smoke(self, value):
set_logger(value)
class Test_set_log_level(object):
@pytest.mark.parametrize(["value"], [
[logbook.CRITICAL],
[logbook.ERROR],
[logbook.WARNING],
[logbook.NOTICE],
[logbook.INFO],
[logbook.DEBUG],
[logbook.TRACE],
[logbook.NOTSET],
])
def test_smoke(self, value):
set_log_level(value)
@pytest.mark.parametrize(["value", "expected"], [
[None, LookupError],
["unexpected", LookupError],
])
def test_exception(self, value, expected):
with pytest.raises(expected):
set_log_level(value)
|
Add test cases for the logger
|
Add test cases for the logger
|
Python
|
mit
|
thombashi/subprocrunner,thombashi/subprocrunner
|
Add test cases for the logger
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
import logbook
import pytest
from subprocrunner import (
set_logger,
set_log_level,
)
class Test_set_logger(object):
@pytest.mark.parametrize(["value"], [
[True],
[False],
])
def test_smoke(self, value):
set_logger(value)
class Test_set_log_level(object):
@pytest.mark.parametrize(["value"], [
[logbook.CRITICAL],
[logbook.ERROR],
[logbook.WARNING],
[logbook.NOTICE],
[logbook.INFO],
[logbook.DEBUG],
[logbook.TRACE],
[logbook.NOTSET],
])
def test_smoke(self, value):
set_log_level(value)
@pytest.mark.parametrize(["value", "expected"], [
[None, LookupError],
["unexpected", LookupError],
])
def test_exception(self, value, expected):
with pytest.raises(expected):
set_log_level(value)
|
<commit_before><commit_msg>Add test cases for the logger<commit_after>
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
import logbook
import pytest
from subprocrunner import (
set_logger,
set_log_level,
)
class Test_set_logger(object):
@pytest.mark.parametrize(["value"], [
[True],
[False],
])
def test_smoke(self, value):
set_logger(value)
class Test_set_log_level(object):
@pytest.mark.parametrize(["value"], [
[logbook.CRITICAL],
[logbook.ERROR],
[logbook.WARNING],
[logbook.NOTICE],
[logbook.INFO],
[logbook.DEBUG],
[logbook.TRACE],
[logbook.NOTSET],
])
def test_smoke(self, value):
set_log_level(value)
@pytest.mark.parametrize(["value", "expected"], [
[None, LookupError],
["unexpected", LookupError],
])
def test_exception(self, value, expected):
with pytest.raises(expected):
set_log_level(value)
|
Add test cases for the logger# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
import logbook
import pytest
from subprocrunner import (
set_logger,
set_log_level,
)
class Test_set_logger(object):
@pytest.mark.parametrize(["value"], [
[True],
[False],
])
def test_smoke(self, value):
set_logger(value)
class Test_set_log_level(object):
@pytest.mark.parametrize(["value"], [
[logbook.CRITICAL],
[logbook.ERROR],
[logbook.WARNING],
[logbook.NOTICE],
[logbook.INFO],
[logbook.DEBUG],
[logbook.TRACE],
[logbook.NOTSET],
])
def test_smoke(self, value):
set_log_level(value)
@pytest.mark.parametrize(["value", "expected"], [
[None, LookupError],
["unexpected", LookupError],
])
def test_exception(self, value, expected):
with pytest.raises(expected):
set_log_level(value)
|
<commit_before><commit_msg>Add test cases for the logger<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
import logbook
import pytest
from subprocrunner import (
set_logger,
set_log_level,
)
class Test_set_logger(object):
@pytest.mark.parametrize(["value"], [
[True],
[False],
])
def test_smoke(self, value):
set_logger(value)
class Test_set_log_level(object):
@pytest.mark.parametrize(["value"], [
[logbook.CRITICAL],
[logbook.ERROR],
[logbook.WARNING],
[logbook.NOTICE],
[logbook.INFO],
[logbook.DEBUG],
[logbook.TRACE],
[logbook.NOTSET],
])
def test_smoke(self, value):
set_log_level(value)
@pytest.mark.parametrize(["value", "expected"], [
[None, LookupError],
["unexpected", LookupError],
])
def test_exception(self, value, expected):
with pytest.raises(expected):
set_log_level(value)
|
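The tests above only smoke-test the two toggles; the usage they imply (a sketch, assuming subprocrunner and logbook are installed):
import logbook
from subprocrunner import set_logger, set_log_level

set_logger(True)              # enable the library's logging (False silences it)
set_log_level(logbook.DEBUG)  # accepts logbook level constants
# anything else (None, arbitrary strings) raises LookupError, per the tests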
|
edc2bb7e4dea1359479940990b13c189e1d57a4a
|
cartel-generation.py
|
cartel-generation.py
|
#!/usr/bin/python
import csv
import sys
import getopt
def help():
print 'cartel-generation.py -c <input-csv-file> -o <output-svg-file>'
def main(argv):
inputfile = ''
outputfile = ''
try:
opts, args = getopt.getopt(argv,"hc:o:",["csv=","osvg="])
except getopt.GetoptError:
help()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
help()
sys.exit()
elif opt in ("-c", "--csv"):
inputfile = arg
elif opt in ("-o", "--osvg"):
outputfile = arg
    print 'Input csv file is', inputfile
    print 'Output svg file is', outputfile
if __name__ == "__main__":
main(sys.argv[1:])
|
Add a basic script with getopt to read command line parameters
|
Add a basic script with getopt to read command line parameters
|
Python
|
mit
|
jmtrivial/cartel-generation
|
Add a basic script with getopt to read command line parameters
|
#!/usr/bin/python
import csv
import sys
import getopt
def help():
print 'cartel-generation.py -c <input-csv-file> -o <output-svg-file>'
def main(argv):
inputfile = ''
outputfile = ''
try:
opts, args = getopt.getopt(argv,"hc:o:",["csv=","osvg="])
except getopt.GetoptError:
help()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
help()
sys.exit()
elif opt in ("-c", "--csv"):
inputfile = arg
elif opt in ("-o", "--osvg"):
outputfile = arg
    print 'Input csv file is', inputfile
    print 'Output svg file is', outputfile
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before><commit_msg>Add a basic script with getopt to read command line parameters<commit_after>
|
#!/usr/bin/python
import csv
import sys
import getopt
def help():
print 'cartel-generation.py -c <input-csv-file> -o <output-svg-file>'
def main(argv):
inputfile = ''
outputfile = ''
try:
opts, args = getopt.getopt(argv,"hc:o:",["csv=","osvg="])
except getopt.GetoptError:
help()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
help()
sys.exit()
elif opt in ("-c", "--csv"):
inputfile = arg
elif opt in ("-o", "--osvg"):
outputfile = arg
    print 'Input csv file is', inputfile
    print 'Output svg file is', outputfile
if __name__ == "__main__":
main(sys.argv[1:])
|
Add a basic script with getopt to read command line parameters#!/usr/bin/python
import csv
import sys
import getopt
def help():
print 'cartel-generation.py -c <input-csv-file> -o <output-svg-file>'
def main(argv):
inputfile = ''
outputfile = ''
try:
opts, args = getopt.getopt(argv,"hc:o:",["csv=","osvg="])
except getopt.GetoptError:
help()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
help()
sys.exit()
elif opt in ("-c", "--csv"):
inputfile = arg
elif opt in ("-o", "--osvg"):
outputfile = arg
    print 'Input csv file is', inputfile
    print 'Output svg file is', outputfile
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before><commit_msg>Add a basic script with getopt to read command line parameters<commit_after>#!/usr/bin/python
import csv
import sys
import getopt
def help():
print 'cartel-generation.py -c <input-csv-file> -o <output-svg-file>'
def main(argv):
inputfile = ''
outputfile = ''
try:
opts, args = getopt.getopt(argv,"hc:o:",["csv=","osvg="])
except getopt.GetoptError:
help()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
help()
sys.exit()
elif opt in ("-c", "--csv"):
inputfile = arg
elif opt in ("-o", "--osvg"):
outputfile = arg
    print 'Input csv file is', inputfile
    print 'Output svg file is', outputfile
if __name__ == "__main__":
main(sys.argv[1:])
|
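For comparison, and not part of the original commit, the same option handling with argparse, which generates -h/--help automatically:
import argparse

def parse_args(argv):
    parser = argparse.ArgumentParser(description='cartel-generation')
    parser.add_argument('-c', '--csv', required=True, help='input csv file')
    parser.add_argument('-o', '--osvg', required=True, help='output svg file')
    return parser.parse_args(argv)

args = parse_args(['-c', 'in.csv', '-o', 'out.svg'])
print('Input csv file is {}'.format(args.csv))
print('Output svg file is {}'.format(args.osvg))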
|
18eeec0557b8acc78849295d6f9358696296f8ad
|
make_inline_config.py
|
make_inline_config.py
|
# Transform config/index.html so that all of its image, font, CSS, and script
# dependencies are inlined, and convert the result into a data URI.
#
# This makes it possible to view the Urchin configuration page without an
# internet connection.
#
# See https://github.com/pebble/clay for a more robust implementation of offline
# configuration pages.
import base64
import os
import re
out = 'data:text/html;base64,'
# Use re instead of something like PyQuery so that the hell of installing lxml
# is not a prerequisite for building the watchface
html = open('config/index.html').read()
css_tags = re.findall('(<link[^>]* href="([^"]+)">)', html, re.I)
assert len(css_tags) == 1
css = open('config/' + css_tags[0][1]).read()
css_dir = os.path.dirname(css_tags[0][1])
urls = re.findall('(url\(([^)]+)\))', css, re.I)
assert len(urls) > 0
for url in urls:
filename = url[1]
filename = re.sub('(^"|"$)', '', filename)
filename = re.sub("(^'|'$)", '', filename)
assert filename.endswith('.woff') or filename.endswith('.png')
full_filename = os.path.join('config', css_dir, filename)
encoded = base64.b64encode(open(full_filename, "rb").read())
if filename.endswith('.woff'):
mime_type = 'application/font-woff'
else:
mime_type = 'image/png'
css = css.replace(
url[0],
'url(data:{};base64,{})'.format(mime_type, encoded)
)
# TODO trim newlines/whitespace in css
html = html.replace(
css_tags[0][0],
'<style type="text/css">{}</style>'.format(css)
)
js_tags = re.findall('(<script[^>]* src="([^"]+)"></script>)', html, re.I)
assert len(js_tags) > 0
for js_tag in js_tags:
filename = js_tag[1]
# TODO trim newlines/whitespace
js = open('config/' + js_tag[1]).read()
html = html.replace(
js_tag[0],
'<script type="text/javascript">{}</script>'.format(js)
)
# TODO trim newlines/whitespace
print out + base64.b64encode(html)
|
Add script to generate inlined config page
|
Add script to generate inlined config page
|
Python
|
mit
|
mddub/urchin-cgm,mddub/urchin-cgm,mddub/urchin-cgm,mddub/urchin-cgm,mddub/urchin-cgm
|
Add script to generate inlined config page
|
# Transform config/index.html so that all of its image, font, CSS, and script
# dependencies are inlined, and convert the result into a data URI.
#
# This makes it possible to view the Urchin configuration page without an
# internet connection.
#
# See https://github.com/pebble/clay for a more robust implementation of offline
# configuration pages.
import base64
import os
import re
out = 'data:text/html;base64,'
# Use re instead of something like PyQuery so that the hell of installing lxml
# is not a prerequisite for building the watchface
html = open('config/index.html').read()
css_tags = re.findall('(<link[^>]* href="([^"]+)">)', html, re.I)
assert len(css_tags) == 1
css = open('config/' + css_tags[0][1]).read()
css_dir = os.path.dirname(css_tags[0][1])
urls = re.findall('(url\(([^)]+)\))', css, re.I)
assert len(urls) > 0
for url in urls:
filename = url[1]
filename = re.sub('(^"|"$)', '', filename)
filename = re.sub("(^'|'$)", '', filename)
assert filename.endswith('.woff') or filename.endswith('.png')
full_filename = os.path.join('config', css_dir, filename)
encoded = base64.b64encode(open(full_filename, "rb").read())
if filename.endswith('.woff'):
mime_type = 'application/font-woff'
else:
mime_type = 'image/png'
css = css.replace(
url[0],
'url(data:{};base64,{})'.format(mime_type, encoded)
)
# TODO trim newlines/whitespace in css
html = html.replace(
css_tags[0][0],
'<style type="text/css">{}</style>'.format(css)
)
js_tags = re.findall('(<script[^>]* src="([^"]+)"></script>)', html, re.I)
assert len(js_tags) > 0
for js_tag in js_tags:
filename = js_tag[1]
# TODO trim newlines/whitespace
js = open('config/' + js_tag[1]).read()
html = html.replace(
js_tag[0],
'<script type="text/javascript">{}</script>'.format(js)
)
# TODO trim newlines/whitespace
print out + base64.b64encode(html)
|
<commit_before><commit_msg>Add script to generate inlined config page<commit_after>
|
# Transform config/index.html so that all of its image, font, CSS, and script
# dependencies are inlined, and convert the result into a data URI.
#
# This makes it possible to view the Urchin configuration page without an
# internet connection.
#
# See https://github.com/pebble/clay for a more robust implementation of offline
# configuration pages.
import base64
import os
import re
out = 'data:text/html;base64,'
# Use re instead of something like PyQuery so that the hell of installing lxml
# is not a prerequisite for building the watchface
html = open('config/index.html').read()
css_tags = re.findall('(<link[^>]* href="([^"]+)">)', html, re.I)
assert len(css_tags) == 1
css = open('config/' + css_tags[0][1]).read()
css_dir = os.path.dirname(css_tags[0][1])
urls = re.findall('(url\(([^)]+)\))', css, re.I)
assert len(urls) > 0
for url in urls:
filename = url[1]
filename = re.sub('(^"|"$)', '', filename)
filename = re.sub("(^'|'$)", '', filename)
assert filename.endswith('.woff') or filename.endswith('.png')
full_filename = os.path.join('config', css_dir, filename)
encoded = base64.b64encode(open(full_filename, "rb").read())
if filename.endswith('.woff'):
mime_type = 'application/font-woff'
else:
mime_type = 'image/png'
css = css.replace(
url[0],
'url(data:{};base64,{})'.format(mime_type, encoded)
)
# TODO trim newlines/whitespace in css
html = html.replace(
css_tags[0][0],
'<style type="text/css">{}</style>'.format(css)
)
js_tags = re.findall('(<script[^>]* src="([^"]+)"></script>)', html, re.I)
assert len(js_tags) > 0
for js_tag in js_tags:
filename = js_tag[1]
# TODO trim newlines/whitespace
js = open('config/' + js_tag[1]).read()
html = html.replace(
js_tag[0],
'<script type="text/javascript">{}</script>'.format(js)
)
# TODO trim newlines/whitespace
print out + base64.b64encode(html)
|
Add script to generate inlined config page# Transform config/index.html so that all of its image, font, CSS, and script
# dependencies are inlined, and convert the result into a data URI.
#
# This makes it possible to view the Urchin configuration page without an
# internet connection.
#
# See https://github.com/pebble/clay for a more robust implementation of offline
# configuration pages.
import base64
import os
import re
out = 'data:text/html;base64,'
# Use re instead of something like PyQuery so that the hell of installing lxml
# is not a prerequisite for building the watchface
html = open('config/index.html').read()
css_tags = re.findall('(<link[^>]* href="([^"]+)">)', html, re.I)
assert len(css_tags) == 1
css = open('config/' + css_tags[0][1]).read()
css_dir = os.path.dirname(css_tags[0][1])
urls = re.findall('(url\(([^)]+)\))', css, re.I)
assert len(urls) > 0
for url in urls:
filename = url[1]
filename = re.sub('(^"|"$)', '', filename)
filename = re.sub("(^'|'$)", '', filename)
assert filename.endswith('.woff') or filename.endswith('.png')
full_filename = os.path.join('config', css_dir, filename)
encoded = base64.b64encode(open(full_filename, "rb").read())
if filename.endswith('.woff'):
mime_type = 'application/font-woff'
else:
mime_type = 'image/png'
css = css.replace(
url[0],
'url(data:{};base64,{})'.format(mime_type, encoded)
)
# TODO trim newlines/whitespace in css
html = html.replace(
css_tags[0][0],
'<style type="text/css">{}</style>'.format(css)
)
js_tags = re.findall('(<script[^>]* src="([^"]+)"></script>)', html, re.I)
assert len(js_tags) > 0
for js_tag in js_tags:
filename = js_tag[1]
# TODO trim newlines/whitespace
js = open('config/' + js_tag[1]).read()
html = html.replace(
js_tag[0],
'<script type="text/javascript">{}</script>'.format(js)
)
# TODO trim newlines/whitespace
print out + base64.b64encode(html)
|
<commit_before><commit_msg>Add script to generate inlined config page<commit_after># Transform config/index.html so that all of its image, font, CSS, and script
# dependencies are inlined, and convert the result into a data URI.
#
# This makes it possible to view the Urchin configuration page without an
# internet connection.
#
# See https://github.com/pebble/clay for a more robust implementation of offline
# configuration pages.
import base64
import os
import re
out = 'data:text/html;base64,'
# Use re instead of something like PyQuery so that the hell of installing lxml
# is not a prerequisite for building the watchface
html = open('config/index.html').read()
css_tags = re.findall('(<link[^>]* href="([^"]+)">)', html, re.I)
assert len(css_tags) == 1
css = open('config/' + css_tags[0][1]).read()
css_dir = os.path.dirname(css_tags[0][1])
urls = re.findall('(url\(([^)]+)\))', css, re.I)
assert len(urls) > 0
for url in urls:
filename = url[1]
filename = re.sub('(^"|"$)', '', filename)
filename = re.sub("(^'|'$)", '', filename)
assert filename.endswith('.woff') or filename.endswith('.png')
full_filename = os.path.join('config', css_dir, filename)
encoded = base64.b64encode(open(full_filename, "rb").read())
if filename.endswith('.woff'):
mime_type = 'application/font-woff'
else:
mime_type = 'image/png'
css = css.replace(
url[0],
'url(data:{};base64,{})'.format(mime_type, encoded)
)
# TODO trim newlines/whitespace in css
html = html.replace(
css_tags[0][0],
'<style type="text/css">{}</style>'.format(css)
)
js_tags = re.findall('(<script[^>]* src="([^"]+)"></script>)', html, re.I)
assert len(js_tags) > 0
for js_tag in js_tags:
filename = js_tag[1]
# TODO trim newlines/whitespace
js = open('config/' + js_tag[1]).read()
html = html.replace(
js_tag[0],
'<script type="text/javascript">{}</script>'.format(js)
)
# TODO trim newlines/whitespace
print out + base64.b64encode(html)
|
|
f2a6897aaa20d2c5a312b1a87d5a7f515f3cdd4b
|
lightware_parse.py
|
lightware_parse.py
|
#!/usr/bin/env python
import serial
s = serial.Serial('/dev/ttyUSB0', baudrate=115200)
while True:
line = s.readline()
dist = line.lstrip(' ').split(' ')[0]
print dist
|
Add lightware LRF parsing code
|
Add lightware LRF parsing code
|
Python
|
mit
|
UCSD-E4E/aerial_lidar,UCSD-E4E/aerial_lidar,UCSD-E4E/aerial_lidar,UCSD-E4E/aerial_lidar,UCSD-E4E/aerial_lidar,UCSD-E4E/aerial_lidar
|
Add lightware LRF parsing code
|
#!/usr/bin/env python
import serial
s = serial.Serial('/dev/ttyUSB0', baudrate=115200)
while True:
line = s.readline()
dist = line.lstrip(' ').split(' ')[0]
print dist
|
<commit_before><commit_msg>Add lightware LRF parsing code<commit_after>
|
#!/usr/bin/env python
import serial
s = serial.Serial('/dev/ttyUSB0', baudrate=115200)
while True:
line = s.readline()
dist = line.lstrip(' ').split(' ')[0]
print dist
|
Add lightware LRF parsing code#!/usr/bin/env python
import serial
s = serial.Serial('/dev/ttyUSB0', baudrate=115200)
while True:
line = s.readline()
dist = line.lstrip(' ').split(' ')[0]
print dist
|
<commit_before><commit_msg>Add lightware LRF parsing code<commit_after>#!/usr/bin/env python
import serial
s = serial.Serial('/dev/ttyUSB0', baudrate=115200)
while True:
line = s.readline()
dist = line.lstrip(' ').split(' ')[0]
print dist
|
|
ab40c975c863ab7d19f05695473cb7c42ec33e9e
|
prototypes/fabfile.py
|
prototypes/fabfile.py
|
def test_local_failures():
local('false 1', fail='ignore')
local('false 2', fail='warn')
local('echo must print')
local('false 3') # default fail is abort
local('echo must NOT print')
def test_remote_failures():
set(fab_hosts = ['127.0.0.1', 'localhost'])
run('false 1', fail='ignore')
run('false 2', fail='warn')
run('echo must print')
run('false 3') # default fail is abort
run('echo must NOT print')
|
Add a file for testing the fab_fail modes and possibly other stuff.
|
Add a file for testing the fab_fail modes and possibly other stuff.
|
Python
|
bsd-2-clause
|
fernandezcuesta/fabric,mathiasertl/fabric,bspink/fabric,rane-hs/fabric-py3,itoed/fabric,sdelements/fabric,tolbkni/fabric,TarasRudnyk/fabric,tekapo/fabric,felix-d/fabric,qinrong/fabric,haridsv/fabric,likesxuqiang/fabric,xLegoz/fabric,bitprophet/fabric,ploxiln/fabric,StackStorm/fabric,amaniak/fabric,MjAbuz/fabric,cmattoon/fabric,elijah513/fabric,getsentry/fabric,pgroudas/fabric,kmonsoor/fabric,simon-engledew/fabric,akaariai/fabric,kxxoling/fabric,raimon49/fabric,rodrigc/fabric,jaraco/fabric,hrubi/fabric,SamuelMarks/fabric,opavader/fabric,cgvarela/fabric,ericholscher/fabric,askulkarni2/fabric,rbramwell/fabric,bitmonk/fabric,pashinin/fabric
|
Add a file for testing the fab_fail modes and possibly other stuff.
|
def test_local_failures():
local('false 1', fail='ignore')
local('false 2', fail='warn')
local('echo must print')
local('false 3') # default fail is abort
local('echo must NOT print')
def test_remote_failures():
set(fab_hosts = ['127.0.0.1', 'localhost'])
run('false 1', fail='ignore')
run('false 2', fail='warn')
run('echo must print')
run('false 3') # default fail is abort
run('echo must NOT print')
|
<commit_before><commit_msg>Add a file for testing the fab_fail modes and possibly other stuff.<commit_after>
|
def test_local_failures():
local('false 1', fail='ignore')
local('false 2', fail='warn')
local('echo must print')
local('false 3') # default fail is abort
local('echo must NOT print')
def test_remote_failures():
set(fab_hosts = ['127.0.0.1', 'localhost'])
run('false 1', fail='ignore')
run('false 2', fail='warn')
run('echo must print')
run('false 3') # default fail is abort
run('echo must NOT print')
|
Add a file for testing the fab_fail modes and possibly other stuff.
def test_local_failures():
local('false 1', fail='ignore')
local('false 2', fail='warn')
local('echo must print')
local('false 3') # default fail is abort
local('echo must NOT print')
def test_remote_failures():
set(fab_hosts = ['127.0.0.1', 'localhost'])
run('false 1', fail='ignore')
run('false 2', fail='warn')
run('echo must print')
run('false 3') # default fail is abort
run('echo must NOT print')
|
<commit_before><commit_msg>Add a file for testing the fab_fail modes and possibly other stuff.<commit_after>
def test_local_failures():
local('false 1', fail='ignore')
local('false 2', fail='warn')
local('echo must print')
local('false 3') # default fail is abort
local('echo must NOT print')
def test_remote_failures():
set(fab_hosts = ['127.0.0.1', 'localhost'])
run('false 1', fail='ignore')
run('false 2', fail='warn')
run('echo must print')
run('false 3') # default fail is abort
run('echo must NOT print')
|
|
ed6bafbed82b35913965a6d95ba11b4f3eaf4b02
|
tests/test_model.py
|
tests/test_model.py
|
import ckanext.archiver.model as archiver_model
try:
from ckan.tests.helpers import reset_db
from ckan.tests import factories as ckan_factories
except ImportError:
from ckan.new_tests.helpers import reset_db
from ckan.new_tests import factories as ckan_factories
from ckan import model
Archival = archiver_model.Archival
class TestArchival(object):
@classmethod
def setup_class(cls):
reset_db()
archiver_model.init_tables(model.meta.engine)
def test_create(self):
dataset = ckan_factories.Dataset()
res = ckan_factories.Resource(package_id=dataset['id'])
archival = Archival.create(res['id'])
assert isinstance(archival, Archival)
assert archival.package_id == dataset['id']
|
Test for previous ResourceGroup change.
|
Test for previous ResourceGroup change.
|
Python
|
mit
|
datagovuk/ckanext-archiver,ckan/ckanext-archiver,ckan/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,ckan/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,datagovuk/ckanext-archiver,datagovuk/ckanext-archiver
|
Test for previous ResourceGroup change.
|
import ckanext.archiver.model as archiver_model
try:
from ckan.tests.helpers import reset_db
from ckan.tests import factories as ckan_factories
except ImportError:
from ckan.new_tests.helpers import reset_db
from ckan.new_tests import factories as ckan_factories
from ckan import model
Archival = archiver_model.Archival
class TestArchival(object):
@classmethod
def setup_class(cls):
reset_db()
archiver_model.init_tables(model.meta.engine)
def test_create(self):
dataset = ckan_factories.Dataset()
res = ckan_factories.Resource(package_id=dataset['id'])
archival = Archival.create(res['id'])
assert isinstance(archival, Archival)
assert archival.package_id == dataset['id']
|
<commit_before><commit_msg>Test for previous ResourceGroup change.<commit_after>
|
import ckanext.archiver.model as archiver_model
try:
from ckan.tests.helpers import reset_db
from ckan.tests import factories as ckan_factories
except ImportError:
from ckan.new_tests.helpers import reset_db
from ckan.new_tests import factories as ckan_factories
from ckan import model
Archival = archiver_model.Archival
class TestArchival(object):
@classmethod
def setup_class(cls):
reset_db()
archiver_model.init_tables(model.meta.engine)
def test_create(self):
dataset = ckan_factories.Dataset()
res = ckan_factories.Resource(package_id=dataset['id'])
archival = Archival.create(res['id'])
assert isinstance(archival, Archival)
assert archival.package_id == dataset['id']
|
Test for previous ResourceGroup change.import ckanext.archiver.model as archiver_model
try:
from ckan.tests.helpers import reset_db
from ckan.tests import factories as ckan_factories
except ImportError:
from ckan.new_tests.helpers import reset_db
from ckan.new_tests import factories as ckan_factories
from ckan import model
Archival = archiver_model.Archival
class TestArchival(object):
@classmethod
def setup_class(cls):
reset_db()
archiver_model.init_tables(model.meta.engine)
def test_create(self):
dataset = ckan_factories.Dataset()
res = ckan_factories.Resource(package_id=dataset['id'])
archival = Archival.create(res['id'])
assert isinstance(archival, Archival)
assert archival.package_id == dataset['id']
|
<commit_before><commit_msg>Test for previous ResourceGroup change.<commit_after>import ckanext.archiver.model as archiver_model
try:
from ckan.tests.helpers import reset_db
from ckan.tests import factories as ckan_factories
except ImportError:
from ckan.new_tests.helpers import reset_db
from ckan.new_tests import factories as ckan_factories
from ckan import model
Archival = archiver_model.Archival
class TestArchival(object):
@classmethod
def setup_class(cls):
reset_db()
archiver_model.init_tables(model.meta.engine)
def test_create(self):
dataset = ckan_factories.Dataset()
res = ckan_factories.Resource(package_id=dataset['id'])
archival = Archival.create(res['id'])
assert isinstance(archival, Archival)
assert archival.package_id == dataset['id']
|
|
f065102e7f6d302097418d6d8d07dd6e3cc38f11
|
tests/test_money.py
|
tests/test_money.py
|
from decimal import Decimal
from django.test import TestCase
from shop.money.money_maker import AbstractMoney, MoneyMaker
class AbstractMoneyTest(TestCase):
def test_is_abstract(self):
self.assertRaises(TypeError, lambda: AbstractMoney(1))
class MoneyMakerTest(TestCase):
def test_create_new_money_type_without_arguments(self):
Money = MoneyMaker()
money = Money()
self.assertTrue(money.is_nan())
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
# different currency than the class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
self.assertRaises(AssertionError, lambda: Money(value))
def test_create_instance_from_decimal(self):
Money = MoneyMaker()
value = Decimal("1.2")
inst = Money(value)
self.assertEqual(inst, value)
|
Add a few simple tests for the money_maker module
|
Add a few simple tests for the money_maker module
|
Python
|
bsd-3-clause
|
nimbis/django-shop,jrief/django-shop,nimbis/django-shop,awesto/django-shop,divio/django-shop,nimbis/django-shop,nimbis/django-shop,rfleschenberg/django-shop,awesto/django-shop,rfleschenberg/django-shop,rfleschenberg/django-shop,khchine5/django-shop,khchine5/django-shop,divio/django-shop,awesto/django-shop,khchine5/django-shop,khchine5/django-shop,jrief/django-shop,divio/django-shop,jrief/django-shop,rfleschenberg/django-shop,jrief/django-shop
|
Add a few simple tests for the money_maker module
|
from decimal import Decimal
from django.test import TestCase
from shop.money.money_maker import AbstractMoney, MoneyMaker
class AbstractMoneyTest(TestCase):
def test_is_abstract(self):
self.assertRaises(TypeError, lambda: AbstractMoney(1))
class MoneyMakerTest(TestCase):
def test_create_new_money_type_without_arguments(self):
Money = MoneyMaker()
money = Money()
self.assertTrue(money.is_nan())
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
# different currency than the class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
self.assertRaises(AssertionError, lambda: Money(value))
def test_create_instance_from_decimal(self):
Money = MoneyMaker()
value = Decimal("1.2")
inst = Money(value)
self.assertEqual(inst, value)
|
<commit_before><commit_msg>Add a few simple tests for the money_maker module<commit_after>
|
from decimal import Decimal
from django.test import TestCase
from shop.money.money_maker import AbstractMoney, MoneyMaker
class AbstractMoneyTest(TestCase):
def test_is_abstract(self):
self.assertRaises(TypeError, lambda: AbstractMoney(1))
class MoneyMakerTest(TestCase):
def test_create_new_money_type_without_arguments(self):
Money = MoneyMaker()
money = Money()
self.assertTrue(money.is_nan())
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
# different currency than the class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
self.assertRaises(AssertionError, lambda: Money(value))
def test_create_instance_from_decimal(self):
Money = MoneyMaker()
value = Decimal("1.2")
inst = Money(value)
self.assertEqual(inst, value)
|
Add a few simple tests for the money_maker modulefrom decimal import Decimal
from django.test import TestCase
from shop.money.money_maker import AbstractMoney, MoneyMaker
class AbstractMoneyTest(TestCase):
def test_is_abstract(self):
self.assertRaises(TypeError, lambda: AbstractMoney(1))
class MoneyMakerTest(TestCase):
def test_create_new_money_type_without_arguments(self):
Money = MoneyMaker()
money = Money()
self.assertTrue(money.is_nan())
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
# different currency than the class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
self.assertRaises(AssertionError, lambda: Money(value))
def test_create_instance_from_decimal(self):
Money = MoneyMaker()
value = Decimal("1.2")
inst = Money(value)
self.assertEqual(inst, value)
|
<commit_before><commit_msg>Add a few simple tests for the money_maker module<commit_after>from decimal import Decimal
from django.test import TestCase
from shop.money.money_maker import AbstractMoney, MoneyMaker
class AbstractMoneyTest(TestCase):
def test_is_abstract(self):
self.assertRaises(TypeError, lambda: AbstractMoney(1))
class MoneyMakerTest(TestCase):
def test_create_new_money_type_without_arguments(self):
Money = MoneyMaker()
money = Money()
self.assertTrue(money.is_nan())
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
# different currency than the class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
self.assertRaises(AssertionError, lambda: Money(value))
def test_create_instance_from_decimal(self):
Money = MoneyMaker()
value = Decimal("1.2")
inst = Money(value)
self.assertEquals(inst, value)
|
|
7536d46435bd4841f538a4d9ca6fc58b3b0113bf
|
test/569-duplicate-points.py
|
test/569-duplicate-points.py
|
import sys
def assert_no_repeated_points(coords):
last_coord = coords[0]
for i in range(1, len(coords)):
coord = coords[i]
if coord == last_coord:
raise Exception("Coordinate %r (at %d) == %r (at %d), but "
"coordinates should not be repeated." %
(coord, i, last_coord, i-1))
with features_in_tile_layer(16, 17885, 27755, 'roads') as features:
for feature in features:
gtype = feature['geometry']['type']
if gtype == 'LineString':
assert_no_repeated_points(feature['geometry']['coordinates'])
elif gtype == 'MultiLineString':
for linestring in feature['geometry']['coordinates']:
assert_no_repeated_points(linestring)
|
Add test for duplicate / repeated points.
|
Add test for duplicate / repeated points.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
Add test for duplicate / repeated points.
|
import sys
def assert_no_repeated_points(coords):
last_coord = coords[0]
for i in range(1, len(coords)):
coord = coords[i]
if coord == last_coord:
raise Exception("Coordinate %r (at %d) == %r (at %d), but "
"coordinates should not be repeated." %
(coord, i, last_coord, i-1))
with features_in_tile_layer(16, 17885, 27755, 'roads') as features:
for feature in features:
gtype = feature['geometry']['type']
if gtype == 'LineString':
assert_no_repeated_points(feature['geometry']['coordinates'])
elif gtype == 'MultiLineString':
for linestring in feature['geometry']['coordinates']:
assert_no_repeated_points(linestring)
|
<commit_before><commit_msg>Add test for duplicate / repeated points.<commit_after>
|
import sys
def assert_no_repeated_points(coords):
last_coord = coords[0]
for i in range(1, len(coords)):
coord = coords[i]
if coord == last_coord:
raise Exception("Coordinate %r (at %d) == %r (at %d), but "
"coordinates should not be repeated." %
(coord, i, last_coord, i-1))
with features_in_tile_layer(16, 17885, 27755, 'roads') as features:
for feature in features:
gtype = feature['geometry']['type']
if gtype == 'LineString':
assert_no_repeated_points(feature['geometry']['coordinates'])
elif gtype == 'MultiLineString':
for linestring in feature['geometry']['coordinates']:
assert_no_repeated_points(linestring)
|
Add test for duplicate / repeated points.import sys
def assert_no_repeated_points(coords):
last_coord = coords[0]
for i in range(1, len(coords)):
coord = coords[i]
if coord == last_coord:
raise Exception("Coordinate %r (at %d) == %r (at %d), but "
"coordinates should not be repeated." %
(coord, i, last_coord, i-1))
with features_in_tile_layer(16, 17885, 27755, 'roads') as features:
for feature in features:
gtype = feature['geometry']['type']
if gtype == 'LineString':
assert_no_repeated_points(feature['geometry']['coordinates'])
elif gtype == 'MultiLineString':
for linestring in feature['geometry']['coordinates']:
assert_no_repeated_points(linestring)
|
<commit_before><commit_msg>Add test for duplicate / repeated points.<commit_after>import sys
def assert_no_repeated_points(coords):
last_coord = coords[0]
for i in range(1, len(coords)):
coord = coords[i]
if coord == last_coord:
raise Exception("Coordinate %r (at %d) == %r (at %d), but "
"coordinates should not be repeated." %
(coord, i, last_coord, i-1))
with features_in_tile_layer(16, 17885, 27755, 'roads') as features:
for feature in features:
gtype = feature['geometry']['type']
if gtype == 'LineString':
assert_no_repeated_points(feature['geometry']['coordinates'])
elif gtype == 'MultiLineString':
for linestring in feature['geometry']['coordinates']:
assert_no_repeated_points(linestring)
|
|
3f0a0b200b7bc1d1d054d7d3982f1fafae2318c1
|
eheritage/injest/qld_ehp_site_parser.py
|
eheritage/injest/qld_ehp_site_parser.py
|
# import requests
import lxml.html
import sys
with open(sys.argv[1]) as f:
data = f.read()
dom = lxml.html.fromstring(data)
for el in dom.xpath("//div[@class='article']//td[@class='formLayoutLabelTD']"):
name = el.findtext('.').strip()
print(name)
# if name == 'Place Components':
# import ipdb; ipdb.set_trace()
parent = el.getparent()
contentEl = parent.xpath("td")[1]
content = [contentEl.findtext('.').strip()]
# content = [c.findtext('.').strip() for c in parent.xpath("td")[1:]]
content += [m.tail.strip() for m in contentEl.getchildren() if m.tail.strip()]
print(content)
print()
|
Add some code for screen scraping QLD data
|
Add some code for screen scraping QLD data
|
Python
|
bsd-3-clause
|
uq-eresearch/eheritage,uq-eresearch/eheritage,uq-eresearch/eheritage
|
Add some code for screen scraping QLD data
|
# import requests
import lxml.html
import sys
with open(sys.argv[1]) as f:
data = f.read()
dom = lxml.html.fromstring(data)
for el in dom.xpath("//div[@class='article']//td[@class='formLayoutLabelTD']"):
name = el.findtext('.').strip()
print(name)
# if name == 'Place Components':
# import ipdb; ipdb.set_trace()
parent = el.getparent()
contentEl = parent.xpath("td")[1]
content = [contentEl.findtext('.').strip()]
# content = [c.findtext('.').strip() for c in parent.xpath("td")[1:]]
content += [m.tail.strip() for m in contentEl.getchildren() if m.tail.strip()]
print(content)
print()
|
<commit_before><commit_msg>Add some code for screen scraping QLD data<commit_after>
|
# import requests
import lxml.html
import sys
with open(sys.argv[1]) as f:
data = f.read()
dom = lxml.html.fromstring(data)
for el in dom.xpath("//div[@class='article']//td[@class='formLayoutLabelTD']"):
name = el.findtext('.').strip()
print(name)
# if name == 'Place Components':
# import ipdb; ipdb.set_trace()
parent = el.getparent()
contentEl = parent.xpath("td")[1]
content = [contentEl.findtext('.').strip()]
# content = [c.findtext('.').strip() for c in parent.xpath("td")[1:]]
content += [m.tail.strip() for m in contentEl.getchildren() if m.tail.strip()]
print(content)
print()
|
Add some code for screen scraping QLD data# import requests
import lxml.html
import sys
with open(sys.argv[1]) as f:
data = f.read()
dom = lxml.html.fromstring(data)
for el in dom.xpath("//div[@class='article']//td[@class='formLayoutLabelTD']"):
name = el.findtext('.').strip()
print(name)
# if name == 'Place Components':
# import ipdb; ipdb.set_trace()
parent = el.getparent()
contentEl = parent.xpath("td")[1]
content = [contentEl.findtext('.').strip()]
# content = [c.findtext('.').strip() for c in parent.xpath("td")[1:]]
content += [m.tail.strip() for m in contentEl.getchildren() if m.tail.strip()]
print(content)
print()
|
<commit_before><commit_msg>Add some code for screen scraping QLD data<commit_after># import requests
import lxml.html
import sys
with open(sys.argv[1]) as f:
data = f.read()
dom = lxml.html.fromstring(data)
for el in dom.xpath("//div[@class='article']//td[@class='formLayoutLabelTD']"):
name = el.findtext('.').strip()
print(name)
# if name == 'Place Components':
# import ipdb; ipdb.set_trace()
parent = el.getparent()
contentEl = parent.xpath("td")[1]
content = [contentEl.findtext('.').strip()]
# content = [c.findtext('.').strip() for c in parent.xpath("td")[1:]]
content += [m.tail.strip() for m in contentEl.getchildren() if m.tail.strip()]
print(content)
print()
|
|
84e68dd23b031785aea517daf7c1f760587c44ff
|
tests/test_core/test_auth.py
|
tests/test_core/test_auth.py
|
"""Test kytos.core.auth module."""
import getpass
from unittest import TestCase
from unittest.mock import patch
def input_password():
"""Get password value"""
password = getpass.getpass()
return password
def input_value():
"""Get input value"""
value = input()
return value
class TestAuth(TestCase):
"""Auth tests."""
@classmethod
@patch("getpass.getpass")
def test_getpass(cls, password):
"""Test when getpass is calling on authentication."""
password.return_value = "youshallnotpass"
assert input_password() == password.return_value
@classmethod
@patch("builtins.input")
def test_user_values(cls, user_value):
"""Test when input is calling on authentication."""
user_value.return_value = "kuser"
assert input_value() == user_value.return_value
|
Create unit test for Auth module
|
Create unit test for Auth module
|
Python
|
mit
|
kytos/kyco,kytos/kytos
|
Create unit test for Auth module
|
"""Test kytos.core.auth module."""
import getpass
from unittest import TestCase
from unittest.mock import patch
def input_password():
"""Get password value"""
password = getpass.getpass()
return password
def input_value():
"""Get input value"""
value = input()
return value
class TestAuth(TestCase):
"""Auth tests."""
@classmethod
@patch("getpass.getpass")
def test_getpass(cls, password):
"""Test when getpass is calling on authentication."""
password.return_value = "youshallnotpass"
assert input_password() == password.return_value
@classmethod
@patch("builtins.input")
def test_user_values(cls, user_value):
"""Test when input is calling on authentication."""
user_value.return_value = "kuser"
assert input_value() == user_value.return_value
|
<commit_before><commit_msg>Create unit test for Auth module<commit_after>
|
"""Test kytos.core.auth module."""
import getpass
from unittest import TestCase
from unittest.mock import patch
def input_password():
"""Get password value"""
password = getpass.getpass()
return password
def input_value():
"""Get input value"""
value = input()
return value
class TestAuth(TestCase):
"""Auth tests."""
@classmethod
@patch("getpass.getpass")
def test_getpass(cls, password):
"""Test when getpass is calling on authentication."""
password.return_value = "youshallnotpass"
assert input_password() == password.return_value
@classmethod
@patch("builtins.input")
def test_user_values(cls, user_value):
"""Test when input is calling on authentication."""
user_value.return_value = "kuser"
assert input_value() == user_value.return_value
|
Create unit test for Auth module"""Test kytos.core.auth module."""
import getpass
from unittest import TestCase
from unittest.mock import patch
def input_password():
"""Get password value"""
password = getpass.getpass()
return password
def input_value():
"""Get input value"""
value = input()
return value
class TestAuth(TestCase):
"""Auth tests."""
@classmethod
@patch("getpass.getpass")
def test_getpass(cls, password):
"""Test when getpass is calling on authentication."""
password.return_value = "youshallnotpass"
assert input_password() == password.return_value
@classmethod
@patch("builtins.input")
def test_user_values(cls, user_value):
"""Test when input is calling on authentication."""
user_value.return_value = "kuser"
assert input_value() == user_value.return_value
|
<commit_before><commit_msg>Create unit test for Auth module<commit_after>"""Test kytos.core.auth module."""
import getpass
from unittest import TestCase
from unittest.mock import patch
def input_password():
"""Get password value"""
password = getpass.getpass()
return password
def input_value():
"""Get input value"""
value = input()
return value
class TestAuth(TestCase):
"""Auth tests."""
@classmethod
@patch("getpass.getpass")
def test_getpass(cls, password):
"""Test when getpass is calling on authentication."""
password.return_value = "youshallnotpass"
assert input_password() == password.return_value
@classmethod
@patch("builtins.input")
def test_user_values(cls, user_value):
"""Test when input is calling on authentication."""
user_value.return_value = "kuser"
assert input_value() == user_value.return_value
|
|
7d1d1a073971b97680e9a9316bd199acebc9919d
|
gem/migrations/0014_add_default_tags.py
|
gem/migrations/0014_add_default_tags.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import (
SiteLanguage, Tag, TagIndexPage, Main, PageTranslation)
def add_default_tags(apps, schema_editor):
main_lang = SiteLanguage.objects.filter(
is_active=True, is_main_language=True).first()
tag_index = TagIndexPage.objects.first()
tags_list = [
[{'title': 'health', 'locale': 'en'}, {'title': 'الصحة', 'locale': 'ar'}, {'title': 'স্বাস্থ্য', 'locale': 'bn'}],
[{'title': 'periods', 'locale': 'en'}, {'title': 'الدورة', 'locale': 'ar'}, {'title': 'পিরিয়ড', 'locale': 'bn'}]
]
for tag in tags_list:
for t in tag:
if main_lang.locale == t['locale']:
main_tag = create_tag(t['title'], tag_index)
for t in tag:
child_lang = SiteLanguage.objects.filter(
locale=t['locale'], is_main_language=False).first()
if child_lang:
create_tag_translation(
main_tag, child_lang, t['title'], tag_index)
def create_tag(title, tag_index):
if Tag.objects.filter(title=title).exists():
return Tag.objects.filter(title=title).first()
else:
tag = Tag(title=title)
tag_index.add_child(instance=tag)
tag.save_revision().publish()
return tag
def create_tag_translation(main_tag, language, translated_title, tag_index):
translated_tag = create_tag(translated_title, tag_index)
if translated_tag:
language_relation = translated_tag.languages.first()
language_relation.language = language
language_relation.save()
translated_tag.save_revision().publish()
PageTranslation.objects.get_or_create(
page=main_tag, translated_page=translated_tag)
class Migration(migrations.Migration):
dependencies = [
('gem', '0013_gemsettings_moderator_name'),
]
operations = [
migrations.RunPython(add_default_tags),
]
|
Add migration for default tags
|
Add migration for default tags
|
Python
|
bsd-2-clause
|
praekelt/molo-gem,praekelt/molo-gem,praekelt/molo-gem
|
Add migration for default tags
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import (
SiteLanguage, Tag, TagIndexPage, Main, PageTranslation)
def add_default_tags(apps, schema_editor):
main_lang = SiteLanguage.objects.filter(
is_active=True, is_main_language=True).first()
tag_index = TagIndexPage.objects.first()
tags_list = [
[{'title': 'health', 'locale': 'en'}, {'title': 'الصحة', 'locale': 'ar'}, {'title': 'স্বাস্থ্য', 'locale': 'bn'}],
[{'title': 'periods', 'locale': 'en'}, {'title': 'الدورة', 'locale': 'ar'}, {'title': 'পিরিয়ড', 'locale': 'bn'}]
]
for tag in tags_list:
for t in tag:
if main_lang.locale == t['locale']:
main_tag = create_tag(t['title'], tag_index)
for t in tag:
child_lang = SiteLanguage.objects.filter(
locale=t['locale'], is_main_language=False).first()
if child_lang:
create_tag_translation(
main_tag, child_lang, t['title'], tag_index)
def create_tag(title, tag_index):
if Tag.objects.filter(title=title).exists():
return Tag.objects.filter(title=title).first()
else:
tag = Tag(title=title)
tag_index.add_child(instance=tag)
tag.save_revision().publish()
return tag
def create_tag_translation(main_tag, language, translated_title, tag_index):
translated_tag = create_tag(translated_title, tag_index)
if translated_tag:
language_relation = translated_tag.languages.first()
language_relation.language = language
language_relation.save()
translated_tag.save_revision().publish()
PageTranslation.objects.get_or_create(
page=main_tag, translated_page=translated_tag)
class Migration(migrations.Migration):
dependencies = [
('gem', '0013_gemsettings_moderator_name'),
]
operations = [
migrations.RunPython(add_default_tags),
]
|
<commit_before><commit_msg>Add migration for default tags<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import (
SiteLanguage, Tag, TagIndexPage, Main, PageTranslation)
def add_default_tags(apps, schema_editor):
main_lang = SiteLanguage.objects.filter(
is_active=True, is_main_language=True).first()
tag_index = TagIndexPage.objects.first()
tags_list = [
[{'title': 'health', 'locale': 'en'}, {'title': 'الصحة', 'locale': 'ar'}, {'title': 'স্বাস্থ্য', 'locale': 'bn'}],
[{'title': 'periods', 'locale': 'en'}, {'title': 'الدورة', 'locale': 'ar'}, {'title': 'পিরিয়ড', 'locale': 'bn'}]
]
for tag in tags_list:
for t in tag:
if main_lang.locale == t['locale']:
main_tag = create_tag(t['title'], tag_index)
for t in tag:
child_lang = SiteLanguage.objects.filter(
locale=t['locale'], is_main_language=False).first()
if child_lang:
create_tag_translation(
main_tag, child_lang, t['title'], tag_index)
def create_tag(title, tag_index):
if Tag.objects.filter(title=title).exists():
return Tag.objects.filter(title=title).first()
else:
tag = Tag(title=title)
tag_index.add_child(instance=tag)
tag.save_revision().publish()
return tag
def create_tag_translation(main_tag, language, translated_title, tag_index):
translated_tag = create_tag(translated_title, tag_index)
if translated_tag:
language_relation = translated_tag.languages.first()
language_relation.language = language
language_relation.save()
translated_tag.save_revision().publish()
PageTranslation.objects.get_or_create(
page=main_tag, translated_page=translated_tag)
class Migration(migrations.Migration):
dependencies = [
('gem', '0013_gemsettings_moderator_name'),
]
operations = [
migrations.RunPython(add_default_tags),
]
|
Add migration for default tags# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import (
SiteLanguage, Tag, TagIndexPage, Main, PageTranslation)
def add_default_tags(apps, schema_editor):
main_lang = SiteLanguage.objects.filter(
is_active=True, is_main_language=True).first()
tag_index = TagIndexPage.objects.first()
tags_list = [
[{'title': 'health', 'locale': 'en'}, {'title': 'الصحة', 'locale': 'ar'}, {'title': 'স্বাস্থ্য', 'locale': 'bn'}],
[{'title': 'periods', 'locale': 'en'}, {'title': 'الدورة', 'locale': 'ar'}, {'title': 'পিরিয়ড', 'locale': 'bn'}]
]
for tag in tags_list:
for t in tag:
if main_lang.locale == t['locale']:
main_tag = create_tag(t['title'], tag_index)
for t in tag:
child_lang = SiteLanguage.objects.filter(
locale=t['locale'], is_main_language=False).first()
if child_lang:
create_tag_translation(
main_tag, child_lang, t['title'], tag_index)
def create_tag(title, tag_index):
if Tag.objects.filter(title=title).exists():
return Tag.objects.filter(title=title).first()
else:
tag = Tag(title=title)
tag_index.add_child(instance=tag)
tag.save_revision().publish()
return tag
def create_tag_translation(main_tag, language, translated_title, tag_index):
translated_tag = create_tag(translated_title, tag_index)
if translated_tag:
language_relation = translated_tag.languages.first()
language_relation.language = language
language_relation.save()
translated_tag.save_revision().publish()
PageTranslation.objects.get_or_create(
page=main_tag, translated_page=translated_tag)
class Migration(migrations.Migration):
dependencies = [
('gem', '0013_gemsettings_moderator_name'),
]
operations = [
migrations.RunPython(add_default_tags),
]
|
<commit_before><commit_msg>Add migration for default tags<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from molo.core.models import (
SiteLanguage, Tag, TagIndexPage, Main, PageTranslation)
def add_default_tags(apps, schema_editor):
main_lang = SiteLanguage.objects.filter(
is_active=True, is_main_language=True).first()
tag_index = TagIndexPage.objects.first()
tags_list = [
[{'title': 'health', 'locale': 'en'}, {'title': 'الصحة', 'locale': 'ar'}, {'title': 'স্বাস্থ্য', 'locale': 'bn'}],
[{'title': 'periods', 'locale': 'en'}, {'title': 'الدورة', 'locale': 'ar'}, {'title': 'পিরিয়ড', 'locale': 'bn'}]
]
for tag in tags_list:
for t in tag:
if main_lang.locale == t['locale']:
main_tag = create_tag(t['title'], tag_index)
for t in tag:
child_lang = SiteLanguage.objects.filter(
locale=t['locale'], is_main_language=False).first()
if child_lang:
create_tag_translation(
main_tag, child_lang, t['title'], tag_index)
def create_tag(title, tag_index):
if Tag.objects.filter(title=title).exists():
return Tag.objects.filter(title=title).first()
else:
tag = Tag(title=title)
tag_index.add_child(instance=tag)
tag.save_revision().publish()
return tag
def create_tag_translation(main_tag, language, translated_title, tag_index):
translated_tag = create_tag(translated_title, tag_index)
if translated_tag:
language_relation = translated_tag.languages.first()
language_relation.language = language
language_relation.save()
translated_tag.save_revision().publish()
PageTranslation.objects.get_or_create(
page=main_tag, translated_page=translated_tag)
class Migration(migrations.Migration):
dependencies = [
('gem', '0013_gemsettings_moderator_name'),
]
operations = [
migrations.RunPython(add_default_tags),
]
|
|
7f04090c574b48b0e1de4590017c7f9960c515fb
|
nova/policies/ips.py
|
nova/policies/ips.py
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
policy.RuleDefault(
name=POLICY_ROOT % 'show',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=POLICY_ROOT % 'index',
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return ips_policies
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
base.create_rule_default(
POLICY_ROOT % 'show',
base.RULE_ADMIN_OR_OWNER,
"""Shows IP addresses details for a network label of a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips/{network_label}'
}
]),
base.create_rule_default(
POLICY_ROOT % 'index',
base.RULE_ADMIN_OR_OWNER,
"""Lists IP addresses that are assigned to a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips'
}
]),
]
def list_rules():
return ips_policies
|
Add policy description for Servers IPs
|
Add policy description for Servers IPs
This commit adds policy doc for Servers IPs policies.
Partial implement blueprint policy-docs
Change-Id: I94a7c023dd97413d30f5be9edc313caeb47cb633
|
Python
|
apache-2.0
|
vmturbo/nova,mikalstill/nova,gooddata/openstack-nova,openstack/nova,Juniper/nova,rahulunair/nova,vmturbo/nova,rajalokan/nova,Juniper/nova,gooddata/openstack-nova,vmturbo/nova,rahulunair/nova,vmturbo/nova,mahak/nova,rahulunair/nova,rajalokan/nova,rajalokan/nova,mikalstill/nova,klmitch/nova,klmitch/nova,gooddata/openstack-nova,phenoxim/nova,klmitch/nova,openstack/nova,klmitch/nova,gooddata/openstack-nova,jianghuaw/nova,jianghuaw/nova,jianghuaw/nova,Juniper/nova,phenoxim/nova,rajalokan/nova,mahak/nova,openstack/nova,jianghuaw/nova,mahak/nova,mikalstill/nova,Juniper/nova
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
policy.RuleDefault(
name=POLICY_ROOT % 'show',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=POLICY_ROOT % 'index',
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return ips_policies
Add policy description for Servers IPs
This commit adds policy doc for Servers IPs policies.
Partial implement blueprint policy-docs
Change-Id: I94a7c023dd97413d30f5be9edc313caeb47cb633
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
base.create_rule_default(
POLICY_ROOT % 'show',
base.RULE_ADMIN_OR_OWNER,
"""Shows IP addresses details for a network label of a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips/{network_label}'
}
]),
base.create_rule_default(
POLICY_ROOT % 'index',
base.RULE_ADMIN_OR_OWNER,
"""Lists IP addresses that are assigned to a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips'
}
]),
]
def list_rules():
return ips_policies
|
<commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
policy.RuleDefault(
name=POLICY_ROOT % 'show',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=POLICY_ROOT % 'index',
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return ips_policies
<commit_msg>Add policy description for Servers IPs
This commit adds policy doc for Servers IPs policies.
Partial implement blueprint policy-docs
Change-Id: I94a7c023dd97413d30f5be9edc313caeb47cb633<commit_after>
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
base.create_rule_default(
POLICY_ROOT % 'show',
base.RULE_ADMIN_OR_OWNER,
"""Shows IP addresses details for a network label of a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips/{network_label}'
}
]),
base.create_rule_default(
POLICY_ROOT % 'index',
base.RULE_ADMIN_OR_OWNER,
"""Lists IP addresses that are assigned to a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips'
}
]),
]
def list_rules():
return ips_policies
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
policy.RuleDefault(
name=POLICY_ROOT % 'show',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=POLICY_ROOT % 'index',
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return ips_policies
Add policy description for Servers IPs
This commit adds policy doc for Servers IPs policies.
Partial implement blueprint policy-docs
Change-Id: I94a7c023dd97413d30f5be9edc313caeb47cb633# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
base.create_rule_default(
POLICY_ROOT % 'show',
base.RULE_ADMIN_OR_OWNER,
"""Shows IP addresses details for a network label of a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips/{network_label}'
}
]),
base.create_rule_default(
POLICY_ROOT % 'index',
base.RULE_ADMIN_OR_OWNER,
"""Lists IP addresses that are assigned to a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips'
}
]),
]
def list_rules():
return ips_policies
|
<commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
policy.RuleDefault(
name=POLICY_ROOT % 'show',
check_str=base.RULE_ADMIN_OR_OWNER),
policy.RuleDefault(
name=POLICY_ROOT % 'index',
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return ips_policies
<commit_msg>Add policy description for Servers IPs
This commit adds policy doc for Servers IPs policies.
Partial implement blueprint policy-docs
Change-Id: I94a7c023dd97413d30f5be9edc313caeb47cb633<commit_after># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
POLICY_ROOT = 'os_compute_api:ips:%s'
ips_policies = [
base.create_rule_default(
POLICY_ROOT % 'show',
base.RULE_ADMIN_OR_OWNER,
"""Shows IP addresses details for a network label of a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips/{network_label}'
}
]),
base.create_rule_default(
POLICY_ROOT % 'index',
base.RULE_ADMIN_OR_OWNER,
"""Lists IP addresses that are assigned to a server.""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/ips'
}
]),
]
def list_rules():
return ips_policies
|
37e6ae1dfaa0b3d4615c29236e6783a8f685ebc0
|
mangacork/scripts/check_len_chapter.py
|
mangacork/scripts/check_len_chapter.py
|
import os
filename = ''
def id_chapters():
global filename
print(os.chdir('../static/images'))
for root, dirs, files in os.walk(os.getcwd()):
if len(dirs) > 0:
filename = str(dirs[0])
def write_file():
f = open(filename, 'w')
f.write('hello world')
f.close()
if __name__ == '__main__':
id_chapters()
|
Add script to write chapter
|
Add script to write chapter
|
Python
|
mit
|
ma3lstrom/manga-cork,ma3lstrom/manga-cork,ma3lstrom/manga-cork
|
Add script to write chapter
|
import os
filename = ''
def id_chapters():
global filename
print(os.chdir('../static/images'))
for root, dirs, files in os.walk(os.getcwd()):
if len(dirs) > 0:
filename = str(dirs[0])
def write_file():
f = open(filename, 'w')
f.write('hello world')
f.close()
if __name__ == '__main__':
id_chapters()
|
<commit_before><commit_msg>Add script to write chapter<commit_after>
|
import os
filename = ''
def id_chapters():
global filename
print(os.chdir('../static/images'))
for root, dirs, files in os.walk(os.getcwd()):
if len(dirs) > 0:
filename = str(dirs[0])
def write_file():
f = open(filename, 'w')
f.write('hello world')
f.close()
if __name__ == '__main__':
id_chapters()
|
Add script to write chapterimport os
filename = ''
def id_chapters():
global filename
print(os.chdir('../static/images'))
for root, dirs, files in os.walk(os.getcwd()):
if len(dirs) > 0:
filename = str(dirs[0])
def write_file():
f = open(filename, 'w')
f.write('hello world')
f.close()
if __name__ == '__main__':
id_chapters()
|
<commit_before><commit_msg>Add script to write chapter<commit_after>import os
filename = ''
def id_chapters():
global filename
print(os.chdir('../static/images'))
for root, dirs, files in os.walk(os.getcwd()):
if len(dirs) > 0:
filename = str(dirs[0])
def write_file():
f = open(filename, 'w')
f.write('hello world')
f.close()
if __name__ == '__main__':
id_chapters()
|
|
c7a73618be923f5e191e4334728b52fca1300a21
|
indra/databases/mgi_client.py
|
indra/databases/mgi_client.py
|
from collections import defaultdict
from indra.util import read_unicode_csv
from indra.resources import get_resource_path
def get_id_from_name(name):
return mgi_name_to_id.get(name)
def get_name_from_id(mgi_id):
return mgi_id_to_name.get(mgi_id)
def get_synonyms(mgi_id):
return mgi_synonyms.get(mgi_id)
def get_id_from_name_synonym(name_synonym):
mgi_id = mgi_name_to_id.get(name_synonym)
if mgi_id:
return mgi_id
mgi_ids = mgi_synonyms_reverse.get(name_synonym)
if mgi_ids:
if len(mgi_ids) == 1:
return mgi_ids[0]
else:
return mgi_ids
return None
def _read_mgi():
fname = get_resource_path('mgi_entries.tsv')
mgi_id_to_name = {}
mgi_name_to_id = {}
mgi_synonyms = {}
mgi_synonyms_reverse = defaultdict(list)
for mgi_id, name, synonyms_str in read_unicode_csv(fname, '\t'):
if name:
mgi_id_to_name[mgi_id] = name
mgi_name_to_id[name] = mgi_id
if synonyms_str:
synonyms = synonyms_str.split('|')
mgi_synonyms[mgi_id] = synonyms
for synonym in synonyms:
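# append, so a synonym shared by several genes maps to all of its MGI IDs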
mgi_synonyms_reverse[synonym].append(mgi_id)
return mgi_id_to_name, mgi_name_to_id, mgi_synonyms, \
dict(mgi_synonyms_reverse)
mgi_id_to_name, mgi_name_to_id, mgi_synonyms, mgi_synonyms_reverse = _read_mgi()
|
Add initial MGI client implementation
|
Add initial MGI client implementation
|
Python
|
bsd-2-clause
|
bgyori/indra,sorgerlab/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra,bgyori/indra
|
Add initial MGI client implementation
|
from collections import defaultdict
from indra.util import read_unicode_csv
from indra.resources import get_resource_path
def get_id_from_name(name):
return mgi_name_to_id.get(name)
def get_name_from_id(mgi_id):
return mgi_id_to_name.get(mgi_id)
def get_synonyms(mgi_id):
return mgi_synonyms.get(mgi_id)
def get_id_from_name_synonym(name_synonym):
mgi_id = mgi_name_to_id.get(name_synonym)
if mgi_id:
return mgi_id
mgi_ids = mgi_synonyms_reverse.get(name_synonym)
if mgi_ids:
if len(mgi_ids) == 1:
return mgi_ids[0]
else:
return mgi_ids
return None
def _read_mgi():
fname = get_resource_path('mgi_entries.tsv')
mgi_id_to_name = {}
mgi_name_to_id = {}
mgi_synonyms = {}
mgi_synonyms_reverse = defaultdict(list)
for mgi_id, name, synonyms_str in read_unicode_csv(fname, '\t'):
if name:
mgi_id_to_name[mgi_id] = name
mgi_name_to_id[name] = mgi_id
if synonyms_str:
synonyms = synonyms_str.split('|')
mgi_synonyms[mgi_id] = synonyms
for synonym in synonyms:
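# append, so a synonym shared by several genes maps to all of its MGI IDs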
mgi_synonyms_reverse[synonym].append(mgi_id)
return mgi_id_to_name, mgi_name_to_id, mgi_synonyms, \
dict(mgi_synonyms_reverse)
mgi_id_to_name, mgi_name_to_id, mgi_synonyms, mgi_synonyms_reverse = _read_mgi()
|
<commit_before><commit_msg>Add initial MGI client implementation<commit_after>
|
from collections import defaultdict
from indra.util import read_unicode_csv
from indra.resources import get_resource_path
def get_id_from_name(name):
return mgi_name_to_id.get(name)
def get_name_from_id(mgi_id):
return mgi_id_to_name.get(mgi_id)
def get_synonyms(mgi_id):
return mgi_synonyms.get(mgi_id)
def get_id_from_name_synonym(name_synonym):
mgi_id = mgi_name_to_id.get(name_synonym)
if mgi_id:
return mgi_id
mgi_ids = mgi_synonyms_reverse.get(name_synonym)
if mgi_ids:
if len(mgi_ids) == 1:
return mgi_ids[0]
else:
return mgi_ids
return None
def _read_mgi():
fname = get_resource_path('mgi_entries.tsv')
mgi_id_to_name = {}
mgi_name_to_id = {}
mgi_synonyms = {}
mgi_synonyms_reverse = defaultdict(list)
for mgi_id, name, synonyms_str in read_unicode_csv(fname, '\t'):
if name:
mgi_id_to_name[mgi_id] = name
mgi_name_to_id[name] = mgi_id
if synonyms_str:
synonyms = synonyms_str.split('|')
mgi_synonyms[mgi_id] = synonyms
for synonym in synonyms:
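# append, so a synonym shared by several genes maps to all of its MGI IDs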
mgi_synonyms_reverse[synonym].append(mgi_id)
return mgi_id_to_name, mgi_name_to_id, mgi_synonyms, \
dict(mgi_synonyms_reverse)
mgi_id_to_name, mgi_name_to_id, mgi_synonyms, mgi_synonyms_reverse = _read_mgi()
|
Add initial MGI client implementationfrom collections import defaultdict
from indra.util import read_unicode_csv
from indra.resources import get_resource_path
def get_id_from_name(name):
return mgi_name_to_id.get(name)
def get_name_from_id(mgi_id):
return mgi_id_to_name.get(mgi_id)
def get_synonyms(mgi_id):
return mgi_synonyms.get(mgi_id)
def get_id_from_name_synonym(name_synonym):
mgi_id = mgi_name_to_id.get(name_synonym)
if mgi_id:
return mgi_id
mgi_ids = mgi_synonyms_reverse.get(name_synonym)
if mgi_ids:
if len(mgi_ids) == 1:
return mgi_ids[0]
else:
return mgi_ids
return None
def _read_mgi():
fname = get_resource_path('mgi_entries.tsv')
mgi_id_to_name = {}
mgi_name_to_id = {}
mgi_synonyms = {}
mgi_synonyms_reverse = defaultdict(list)
for mgi_id, name, synonyms_str in read_unicode_csv(fname, '\t'):
if name:
mgi_id_to_name[mgi_id] = name
mgi_name_to_id[name] = mgi_id
if synonyms_str:
synonyms = synonyms_str.split('|')
mgi_synonyms[mgi_id] = synonyms
for synonym in synonyms:
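# append, so a synonym shared by several genes maps to all of its MGI IDs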
mgi_synonyms_reverse[synonym].append(mgi_id)
return mgi_id_to_name, mgi_name_to_id, mgi_synonyms, \
dict(mgi_synonyms_reverse)
mgi_id_to_name, mgi_name_to_id, mgi_synonyms, mgi_synonyms_reverse = _read_mgi()
|
<commit_before><commit_msg>Add initial MGI client implementation<commit_after>from collections import defaultdict
from indra.util import read_unicode_csv
from indra.resources import get_resource_path
def get_id_from_name(name):
return mgi_name_to_id.get(name)
def get_name_from_id(mgi_id):
return mgi_id_to_name.get(mgi_id)
def get_synonyms(mgi_id):
return mgi_synonyms.get(mgi_id)
def get_id_from_name_synonym(name_synonym):
mgi_id = mgi_name_to_id.get(name_synonym)
if mgi_id:
return mgi_id
mgi_ids = mgi_synonyms_reverse.get(name_synonym)
if mgi_ids:
if len(mgi_ids) == 1:
return mgi_ids[0]
else:
return mgi_ids
return None
def _read_mgi():
fname = get_resource_path('mgi_entries.tsv')
mgi_id_to_name = {}
mgi_name_to_id = {}
mgi_synonyms = {}
mgi_synonyms_reverse = defaultdict(list)
for mgi_id, name, synonyms_str in read_unicode_csv(fname, '\t'):
if name:
mgi_id_to_name[mgi_id] = name
mgi_name_to_id[name] = mgi_id
if synonyms_str:
synonyms = synonyms_str.split('|')
mgi_synonyms[mgi_id] = synonyms
for synonym in synonyms:
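# append, so a synonym shared by several genes maps to all of its MGI IDs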
mgi_synonyms_reverse[synonym].append(mgi_id)
return mgi_id_to_name, mgi_name_to_id, mgi_synonyms, \
dict(mgi_synonyms_reverse)
mgi_id_to_name, mgi_name_to_id, mgi_synonyms, mgi_synonyms_reverse = _read_mgi()
|
|
4a985c3695c5781ab90c35b929eb21c3207d11ff
|
bluebottle/files/tests/test_models.py
|
bluebottle/files/tests/test_models.py
|
from django.test import TestCase
from bluebottle.files.tests.factories import ImageFactory
class FileTestCase(TestCase):
def test_file_properties(self):
image = ImageFactory.create()
self.assertEqual(str(image), str(image.id))
self.assertGreater(len(str(image)), 8)
|
Add test for file model
|
Add test for file model
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add test for file model
|
from django.test import TestCase
from bluebottle.files.tests.factories import ImageFactory
class FileTestCase(TestCase):
def test_file_properties(self):
image = ImageFactory.create()
self.assertEqual(str(image), str(image.id))
self.assertGreater(len(str(image)), 8)
|
<commit_before><commit_msg>Add test for file model<commit_after>
|
from django.test import TestCase
from bluebottle.files.tests.factories import ImageFactory
class FileTestCase(TestCase):
def test_file_properties(self):
image = ImageFactory.create()
self.assertEqual(str(image), str(image.id))
self.assertGreater(len(str(image)), 8)
|
Add test for file modelfrom django.test import TestCase
from bluebottle.files.tests.factories import ImageFactory
class FileTestCase(TestCase):
def test_file_properties(self):
image = ImageFactory.create()
self.assertEqual(str(image), str(image.id))
self.assertGreater(len(str(image)), 8)
|
<commit_before><commit_msg>Add test for file model<commit_after>from django.test import TestCase
from bluebottle.files.tests.factories import ImageFactory
class FileTestCase(TestCase):
def test_file_properties(self):
image = ImageFactory.create()
self.assertEqual(str(image), str(image.id))
self.assertGreater(len(str(image)), 8)
|
|
860440c97fe9d972ba72d56e5ef18a278f0597c0
|
svg-xml-script-template.py
|
svg-xml-script-template.py
|
#!/usr/bin/env python3
"""
SPDX-License-Identifier: LicenseRef-KDE-Accepted-LGPL
SPDX-FileCopyrightText: 2020 Noah Davis <noahadvs@gmail.com>
SPDX-FileCopyrightText: 2020 Niccolò Venerandi <niccolo@venerandi.com>
"""
import sys
from lxml import etree
"""
This is a template for making scripts that modify SVGs by parsing XML.
"""
# These are needed to prevent nonsense namespaces like ns0 from being
# added to otherwise perfectly fine svg elements and attributes
etree.register_namespace("w3c", "http://www.w3.org/2000/svg")
etree.register_namespace("xlink", "http://www.w3.org/1999/xlink")
etree.register_namespace("inkscape", "http://www.inkscape.org/namespaces/inkscape")
etree.register_namespace("dc", "http://purl.org/dc/elements/1.1/")
etree.register_namespace("cc", "http://creativecommons.org/ns#")
etree.register_namespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
etree.register_namespace("sodipodi", "http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd")
# Get filenames as arguments. Combine with your favorite CLI tools.
# My favorites are rg (aka ripgrep) and fd (aka fd-find).
# Remember to filter out files that are not SVGs!
# Example: ./this-script.py $(rg -t svg -l 'height="0"')
for f in sys.argv[1:]:
tree = etree.parse(f)
root = tree.getroot()
wasEdited = False
# BEGIN section
# Reimplement this section as needed
# {http://www.w3.org/2000/svg} is needed to find SVG elements
# Example: find all rect elements
for elem in root.iterfind(".//{http://www.w3.org/2000/svg}rect"):
# Example: find rect elements where height="0"
if (elem.get("height") == "0"):
# Example: remove rect elements that have height="0"
elem.getparent().remove(elem)
wasEdited = True # Remember to keep this
# END section
print(f + ": " + ("edited" if wasEdited else "ignored"))
if wasEdited:
tree.write(f, encoding="utf-8", xml_declaration=False, method="xml")
|
Add template for making python scripts that edit SVGs
|
Add template for making python scripts that edit SVGs
|
Python
|
lgpl-2.1
|
KDE/breeze-icons,KDE/breeze-icons,KDE/breeze-icons,KDE/breeze-icons,KDE/breeze-icons
|
Add template for making python scripts that edit SVGs
|
#!/usr/bin/env python3
"""
SPDX-License-Identifier: LicenseRef-KDE-Accepted-LGPL
SPDX-FileCopyrightText: 2020 Noah Davis <noahadvs@gmail.com>
SPDX-FileCopyrightText: 2020 Niccolò Venerandi <niccolo@venerandi.com>
"""
import sys
from lxml import etree
"""
This is a template for making scripts that modify SVGs by parsing XML.
"""
# These are needed to prevent nonsense namespaces like ns0 from being
# added to otherwise perfectly fine svg elements and attributes
etree.register_namespace("w3c", "http://www.w3.org/2000/svg")
etree.register_namespace("xlink", "http://www.w3.org/1999/xlink")
etree.register_namespace("inkscape", "http://www.inkscape.org/namespaces/inkscape")
etree.register_namespace("dc", "http://purl.org/dc/elements/1.1/")
etree.register_namespace("cc", "http://creativecommons.org/ns#")
etree.register_namespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
etree.register_namespace("sodipodi", "http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd")
# Get filenames as arguments. Combine with your favorite CLI tools.
# My favorites are rg (aka ripgrep) and fd (aka fd-find).
# Remember to filter out files that are not SVGs!
# Example: ./this-script.py $(rg -t svg -l 'height="0"')
for f in sys.argv[1:]:
tree = etree.parse(f)
root = tree.getroot()
wasEdited = False
# BEGIN section
# Reimplement this section as needed
# {http://www.w3.org/2000/svg} is needed to find SVG elements
# Example: find all rect elements
for elem in root.iterfind(".//{http://www.w3.org/2000/svg}rect"):
# Example: find rect elements where height="0"
if (elem.get("height") == "0"):
# Example: remove rect elements that have height="0"
elem.getparent().remove(elem)
wasEdited = True # Remember to keep this
# END section
print(f + ": " + ("edited" if wasEdited else "ignored"))
if wasEdited:
tree.write(f, encoding="utf-8", xml_declaration=False, method="xml")
|
<commit_before><commit_msg>Add template for making python scripts that edit SVGs<commit_after>
|
#!/usr/bin/env python3
"""
SPDX-License-Identifier: LicenseRef-KDE-Accepted-LGPL
SPDX-FileCopyrightText: 2020 Noah Davis <noahadvs@gmail.com>
SPDX-FileCopyrightText: 2020 Niccolò Venerandi <niccolo@venerandi.com>
"""
import sys
from lxml import etree
"""
This is a template for making scripts that modify SVGs by parsing XML.
"""
# These are needed to prevent nonsense namespaces like ns0 from being
# added to otherwise perfectly fine svg elements and attributes
etree.register_namespace("w3c", "http://www.w3.org/2000/svg")
etree.register_namespace("xlink", "http://www.w3.org/1999/xlink")
etree.register_namespace("inkscape", "http://www.inkscape.org/namespaces/inkscape")
etree.register_namespace("dc", "http://purl.org/dc/elements/1.1/")
etree.register_namespace("cc", "http://creativecommons.org/ns#")
etree.register_namespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
etree.register_namespace("sodipodi", "http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd")
# Get filenames as arguments. Combine with your favorite CLI tools.
# My favorites are rg (aka ripgrep) and fd (aka fd-find).
# Remember to filter out files that are not SVGs!
# Example: ./this-script.py $(rg -t svg -l 'height="0"')
for f in sys.argv[1:]:
tree = etree.parse(f)
root = tree.getroot()
wasEdited = False
# BEGIN section
# Reimplement this section as needed
# {http://www.w3.org/2000/svg} is needed to find SVG elements
# Example: find all rect elements
for elem in root.iterfind(".//{http://www.w3.org/2000/svg}rect"):
# Example: find rect elements where height="0"
if (elem.get("height") == "0"):
# Example: remove rect elements that have height="0"
elem.getparent().remove(elem)
wasEdited = True # Remember to keep this
# END section
print(f + ": " + ("edited" if wasEdited else "ignored"))
if wasEdited:
tree.write(f, encoding="utf-8", xml_declaration=False, method="xml")
|
Add template for making python scripts that edit SVGs#!/usr/bin/env python3
"""
SPDX-License-Identifier: LicenseRef-KDE-Accepted-LGPL
SPDX-FileCopyrightText: 2020 Noah Davis <noahadvs@gmail.com>
SPDX-FileCopyrightText: 2020 Niccolò Venerandi <niccolo@venerandi.com>
"""
import sys
from lxml import etree
"""
This is a template for making scripts that modify SVGs by parsing XML.
"""
# These are needed to prevent nonsense namespaces like ns0 from being
# added to otherwise perfectly fine svg elements and attributes
etree.register_namespace("w3c", "http://www.w3.org/2000/svg")
etree.register_namespace("xlink", "http://www.w3.org/1999/xlink")
etree.register_namespace("inkscape", "http://www.inkscape.org/namespaces/inkscape")
etree.register_namespace("dc", "http://purl.org/dc/elements/1.1/")
etree.register_namespace("cc", "http://creativecommons.org/ns#")
etree.register_namespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
etree.register_namespace("sodipodi", "http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd")
# Get filenames as arguments. Combine with your favorite CLI tools.
# My favorites are rg (aka ripgrep) and fd (aka fd-find).
# Remember to filter out files that are not SVGs!
# Example: ./this-script.py $(rg -t svg -l 'height="0"')
for f in sys.argv[1:]:
tree = etree.parse(f)
root = tree.getroot()
wasEdited = False
# BEGIN section
# Reimplement this section as needed
# {http://www.w3.org/2000/svg} is needed to find SVG elements
# Example: find all rect elements
for elem in root.iterfind(".//{http://www.w3.org/2000/svg}rect"):
# Example: find rect elements where height="0"
if (elem.get("height") == "0"):
# Example: remove rect elements that have height="0"
elem.getparent().remove(elem)
wasEdited = True # Remember to keep this
# END section
print(f + ": " + ("edited" if wasEdited else "ignored"))
if wasEdited:
tree.write(f, encoding="utf-8", xml_declaration=False, method="xml")
|
<commit_before><commit_msg>Add template for making python scripts that edit SVGs<commit_after>#!/usr/bin/env python3
"""
SPDX-License-Identifier: LicenseRef-KDE-Accepted-LGPL
SPDX-FileCopyrightText: 2020 Noah Davis <noahadvs@gmail.com>
SPDX-FileCopyrightText: 2020 Niccolò Venerandi <niccolo@venerandi.com>
"""
import sys
from lxml import etree
"""
This is a template for making scripts that modify SVGs by parsing XML.
"""
# These are needed to prevent nonsense namespaces like ns0 from being
# added to otherwise perfectly fine svg elements and attributes
etree.register_namespace("w3c", "http://www.w3.org/2000/svg")
etree.register_namespace("xlink", "http://www.w3.org/1999/xlink")
etree.register_namespace("inkscape", "http://www.inkscape.org/namespaces/inkscape")
etree.register_namespace("dc", "http://purl.org/dc/elements/1.1/")
etree.register_namespace("cc", "http://creativecommons.org/ns#")
etree.register_namespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
etree.register_namespace("sodipodi", "http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd")
# Get filenames as arguments. Combine with your favorite CLI tools.
# My favorites are rg (aka ripgrep) and fd (aka fd-find).
# Remember to filter out files that are not SVGs!
# Example: ./this-script.py $(rg -t svg -l 'height="0"')
for f in sys.argv[1:]:
tree = etree.parse(f)
root = tree.getroot()
wasEdited = False
# BEGIN section
# Reimplement this section as needed
# {http://www.w3.org/2000/svg} is needed to find SVG elements
# Example: find all rect elements
for elem in root.iterfind(".//{http://www.w3.org/2000/svg}rect"):
# Example: find rect elements where height="0"
if (elem.get("height") == "0"):
# Example: remove rect elements that have height="0"
elem.getparent().remove(elem)
wasEdited = True # Remember to keep this
# END section
print(f + ": " + ("edited" if wasEdited else "ignored"))
if wasEdited:
tree.write(f, encoding="utf-8", xml_declaration=False, method="xml")
|
|
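For contrast, here is a hypothetical stdlib-only variant of the template above, for environments without lxml. This is a sketch under that assumption, not part of the repository: xml.etree.ElementTree has no getparent(), so a child-to-parent map is built first; the rest follows the same find/remove/write pattern.

import sys
import xml.etree.ElementTree as ET

# Keep the SVG namespace unprefixed in the output.
ET.register_namespace("", "http://www.w3.org/2000/svg")

for f in sys.argv[1:]:
    tree = ET.parse(f)
    root = tree.getroot()
    # ElementTree elements do not know their parent, so map them up front.
    parents = {child: parent for parent in root.iter() for child in parent}
    was_edited = False
    for elem in list(root.iter("{http://www.w3.org/2000/svg}rect")):
        if elem.get("height") == "0":
            parents[elem].remove(elem)
            was_edited = True
    print(f + ": " + ("edited" if was_edited else "ignored"))
    if was_edited:
        tree.write(f, encoding="unicode", xml_declaration=False)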
97db65bc9ece1ffef292b60b67391182cdaae8fb
|
migrations/versions/0133_set_services_sms_prefix.py
|
migrations/versions/0133_set_services_sms_prefix.py
|
import os
from app import config
"""
Revision ID: 0133_set_services_sms_prefix
Revises: 0132_add_sms_prefix_setting
Create Date: 2017-11-03 15:55:35.657488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0133_set_services_sms_prefix'
down_revision = '0132_add_sms_prefix_setting'
config = config.configs[os.environ['NOTIFY_ENVIRONMENT']]
default_sms_sender = config.FROM_NUMBER
def upgrade():
op.execute("""
update services set prefix_sms = True
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender = '{}'
)
""".format(default_sms_sender))
op.execute("""
update services set prefix_sms = False
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender != '{}'
)
""".format(default_sms_sender))
def downgrade():
op.execute("""
UPDATE services set prefix_sms = null
""")
|
Migrate prefix SMS setting to be true or false
|
Migrate prefix SMS setting to be true or false
We want services to have control over this setting, rather than deriving
it from the value of their sender. This commit does that derivation one
last time, and stores it in the column, where it can be changed as and
when needed.
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Migrate prefix SMS setting to be true or false
We want services to have control over this setting, rather than deriving
it from the value of their sender. This commit does that derivation one
last time, and stores it in the column, where it can be changed as and
when needed.
|
import os
from app import config
"""
Revision ID: 0133_set_services_sms_prefix
Revises: 0132_add_sms_prefix_setting
Create Date: 2017-11-03 15:55:35.657488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0133_set_services_sms_prefix'
down_revision = '0132_add_sms_prefix_setting'
config = config.configs[os.environ['NOTIFY_ENVIRONMENT']]
default_sms_sender = config.FROM_NUMBER
def upgrade():
op.execute("""
update services set prefix_sms = True
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender = '{}'
)
""".format(default_sms_sender))
op.execute("""
update services set prefix_sms = False
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender != '{}'
)
""".format(default_sms_sender))
def downgrade():
op.execute("""
UPDATE services set prefix_sms = null
""")
|
<commit_before><commit_msg>Migrate prefix SMS setting to be true or false
We want services to have control over this setting, rather than deriving
it from the value of their sender. This commit does that derivation one
last time, and stores it in the column, where it can be changed as and
when needed.<commit_after>
|
import os
from app import config
"""
Revision ID: 0133_set_services_sms_prefix
Revises: 0132_add_sms_prefix_setting
Create Date: 2017-11-03 15:55:35.657488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0133_set_services_sms_prefix'
down_revision = '0132_add_sms_prefix_setting'
config = config.configs[os.environ['NOTIFY_ENVIRONMENT']]
default_sms_sender = config.FROM_NUMBER
def upgrade():
op.execute("""
update services set prefix_sms = True
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender = '{}'
)
""".format(default_sms_sender))
op.execute("""
update services set prefix_sms = False
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender != '{}'
)
""".format(default_sms_sender))
def downgrade():
op.execute("""
UPDATE services set prefix_sms = null
""")
|
Migrate prefix SMS setting to be true or false
We want services to have control over this setting, rather than deriving
it from the value of their sender. This commit does that derivation one
last time, and stores it in the column, where it can be changed as and
when needed.import os
from app import config
"""
Revision ID: 0133_set_services_sms_prefix
Revises: 0132_add_sms_prefix_setting
Create Date: 2017-11-03 15:55:35.657488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0133_set_services_sms_prefix'
down_revision = '0132_add_sms_prefix_setting'
config = config.configs[os.environ['NOTIFY_ENVIRONMENT']]
default_sms_sender = config.FROM_NUMBER
def upgrade():
op.execute("""
update services set prefix_sms = True
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender = '{}'
)
""".format(default_sms_sender))
op.execute("""
update services set prefix_sms = False
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender != '{}'
)
""".format(default_sms_sender))
def downgrade():
op.execute("""
UPDATE services set prefix_sms = null
""")
|
<commit_before><commit_msg>Migrate prefix SMS setting to be true or false
We want services to have control over this setting, rather than deriving
it from the value of their sender. This commit does that derivation one
last time, and stores it in the column, where it can be changed as and
when needed.<commit_after>import os
from app import config
"""
Revision ID: 0133_set_services_sms_prefix
Revises: 0132_add_sms_prefix_setting
Create Date: 2017-11-03 15:55:35.657488
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0133_set_services_sms_prefix'
down_revision = '0132_add_sms_prefix_setting'
config = config.configs[os.environ['NOTIFY_ENVIRONMENT']]
default_sms_sender = config.FROM_NUMBER
def upgrade():
op.execute("""
update services set prefix_sms = True
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender = '{}'
)
""".format(default_sms_sender))
op.execute("""
update services set prefix_sms = False
where id in (
select service_id from service_sms_senders
where is_default = True and sms_sender != '{}'
)
""".format(default_sms_sender))
def downgrade():
op.execute("""
UPDATE services set prefix_sms = null
""")
|
|
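The upgrade above interpolates default_sms_sender into the SQL via str.format(). A minimal sketch of the same update using a bound parameter instead — assuming PostgreSQL's UPDATE ... FROM syntax and SQLAlchemy 1.4-style execute(); this is not the project's code:

import sqlalchemy as sa
from alembic import op

def upgrade():
    op.get_bind().execute(
        sa.text(
            "update services "
            "set prefix_sms = (s.sms_sender = :sender) "
            "from service_sms_senders s "
            "where s.service_id = services.id and s.is_default = True"
        ),
        {"sender": default_sms_sender},  # defined as in the migration above
    )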
159006e87cbbb08689284ae2534f556f66c0159b
|
alabaster/__init__.py
|
alabaster/__init__.py
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
Declare extension version and parallel read safety
|
Declare extension version and parallel read safety
This is necessary for Sphinx' parallel read feature to work, since we import alabaster all the time now.
|
Python
|
bsd-3-clause
|
bgeron/alabaster,jupytercalpoly/jupyter-alabaster-theme,ellisonbg/jupyter-alabaster-theme,nikolas/alabaster,charnpreetsingh/jupyter-alabaster-theme,jupytercalpoly/jupyter-alabaster-theme,charnpreetsingh/jupyter-alabaster-theme,bgeron/alabaster,ellisonbg/jupyter-alabaster-theme,nikolas/alabaster,charnpreetsingh/jupyter-alabaster-theme,jupytercalpoly/jupyter-alabaster-theme,ellisonbg/jupyter-alabaster-theme
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
Declare extension version and parallel read safety
This is necessary for Sphinx' parallel read feature to work, since we import alabaster all the time now.
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
<commit_before>import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
<commit_msg>Declare extension version and parallel read safety
This is necessary for Sphinx' parallel read feature to work, since we import alabaster all the time now.<commit_after>
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
Declare extension version and parallel read safety
This is necessary for Sphinx' parallel read feature to work, since we import alabaster all the time now.import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
<commit_before>import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
<commit_msg>Declare extension version and parallel read safety
This is necessary for Sphinx' parallel read feature to work, since we import alabaster all the time now.<commit_after>import os
from alabaster import _version as version
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def update_context(app, pagename, templatename, context, doctree):
context['alabaster_version'] = version.__version__
def setup(app):
app.connect('html-page-context', update_context)
return {'version': version.__version__,
'parallel_read_safe': True}
|
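As an aside, recent Sphinx releases also accept a parallel_write_safe key in the dict returned from setup(); a hedged extension of the file above, reusing its update_context and version names (verify the key against your Sphinx version):

def setup(app):
    app.connect('html-page-context', update_context)
    return {'version': version.__version__,
            'parallel_read_safe': True,
            'parallel_write_safe': True}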
71ddadda05f70e53ee3d7ec496797c11ad080583
|
tests/functional/test_lintmigrations_command.py
|
tests/functional/test_lintmigrations_command.py
|
from unittest.mock import patch
from django.core.management import call_command
from django.test import TransactionTestCase
class LintMigrationsCommandTestCase(TransactionTestCase):
databases = {"default", "sqlite"}
def test_plain(self):
with self.assertRaises(SystemExit):
call_command("lintmigrations")
def test_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
call_command("lintmigrations")
def test_command_line_app_label(self):
call_command("lintmigrations", app_label="app_correct")
def test_command_line_and_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
with self.assertRaises(SystemExit):
call_command("lintmigrations", app_label="app_drop_table")
|
Add functional tests for lintmigrations command
|
Add functional tests for lintmigrations command
|
Python
|
apache-2.0
|
3YOURMIND/django-migration-linter
|
Add functional tests for lintmigrations command
|
from unittest.mock import patch
from django.core.management import call_command
from django.test import TransactionTestCase
class LintMigrationsCommandTestCase(TransactionTestCase):
databases = {"default", "sqlite"}
def test_plain(self):
with self.assertRaises(SystemExit):
call_command("lintmigrations")
def test_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
call_command("lintmigrations")
def test_command_line_app_label(self):
call_command("lintmigrations", app_label="app_correct")
def test_command_line_and_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
with self.assertRaises(SystemExit):
call_command("lintmigrations", app_label="app_drop_table")
|
<commit_before><commit_msg>Add functional tests for lintmigrations command<commit_after>
|
from unittest.mock import patch
from django.core.management import call_command
from django.test import TransactionTestCase
class LintMigrationsCommandTestCase(TransactionTestCase):
databases = {"default", "sqlite"}
def test_plain(self):
with self.assertRaises(SystemExit):
call_command("lintmigrations")
def test_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
call_command("lintmigrations")
def test_command_line_app_label(self):
call_command("lintmigrations", app_label="app_correct")
def test_command_line_and_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
with self.assertRaises(SystemExit):
call_command("lintmigrations", app_label="app_drop_table")
|
Add functional tests for lintmigrations commandfrom unittest.mock import patch
from django.core.management import call_command
from django.test import TransactionTestCase
class LintMigrationsCommandTestCase(TransactionTestCase):
databases = {"default", "sqlite"}
def test_plain(self):
with self.assertRaises(SystemExit):
call_command("lintmigrations")
def test_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
call_command("lintmigrations")
def test_command_line_app_label(self):
call_command("lintmigrations", app_label="app_correct")
def test_command_line_and_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
with self.assertRaises(SystemExit):
call_command("lintmigrations", app_label="app_drop_table")
|
<commit_before><commit_msg>Add functional tests for lintmigrations command<commit_after>from unittest.mock import patch
from django.core.management import call_command
from django.test import TransactionTestCase
class LintMigrationsCommandTestCase(TransactionTestCase):
databases = {"default", "sqlite"}
def test_plain(self):
with self.assertRaises(SystemExit):
call_command("lintmigrations")
def test_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
call_command("lintmigrations")
def test_command_line_app_label(self):
call_command("lintmigrations", app_label="app_correct")
def test_command_line_and_config_file_app_label(self):
with patch(
"django_migration_linter.management.commands.lintmigrations.Command.read_config_file"
) as config_fn:
config_fn.return_value = {"app_label": "app_correct"}
with self.assertRaises(SystemExit):
call_command("lintmigrations", app_label="app_drop_table")
|
|
3cc84213d5d4c552705a8fdbf999b6dd9e428ebe
|
tests/test_channel_shim.py
|
tests/test_channel_shim.py
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
if v >= (0, 13, 0) and v < (1, 0, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
Add some checks that the Channel shim chooses gevent.queue.Channel
|
Add some checks that the Channel shim chooses gevent.queue.Channel
Previously, there were no checks to make sure that gevent.queue.Channel
was used when possible.
|
Python
|
bsd-3-clause
|
DataDog/wal-e,equa/wal-e,heroku/wal-e,wal-e/wal-e,fdr/wal-e,nagual13/wal-e,x86Labs/wal-e,RichardKnop/wal-e,intoximeters/wal-e,ArtemZ/wal-e,ajmarks/wal-e,tenstartups/wal-e
|
Add some checks that the Channel shim chooses gevent.queue.Channel
Previously, there were no checks to make sure that gevent.queue.Channel
was used when possible.
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
if v >= (0, 13, 0) and v < (1, 0, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
<commit_before><commit_msg>Add some checks that the Channel shim chooses gevent.queue.Channel
Previously, there were no checks to make sure that gevent.queue.Channel
was used when possible.<commit_after>
|
import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
if v >= (0, 13, 0) and v < (1, 0, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
Add some checks that the Channel shim chooses gevent.queue.Channel
Previously, there were no checks to make sure that gevent.queue.Channel
was used when possible.import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
if v >= (0, 13, 0) and v < (1, 0, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
<commit_before><commit_msg>Add some checks that the Channel shim chooses gevent.queue.Channel
Previously, there were no checks to make sure that gevent.queue.Channel
was used when possible.<commit_after>import gevent
from gevent import queue
from wal_e import channel
def test_channel_shim():
v = tuple(int(x) for x in gevent.__version__.split('.'))
if v >= (0, 13, 0) and v < (1, 0, 0):
assert isinstance(channel.Channel(), queue.Queue)
elif v >= (1, 0, 0):
assert isinstance(channel.Channel(), queue.Channel)
else:
assert False, 'Unexpected version ' + gevent.__version__
|
|
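One caveat in the test above: tuple(int(x) for x in gevent.__version__.split('.')) raises ValueError on pre-release strings such as '1.0rc1'. A more forgiving parser, sketched as a generic helper rather than anything in wal-e:

import re

def version_tuple(s):
    parts = []
    for piece in s.split('.'):
        m = re.match(r'\d+', piece)
        if not m:
            break  # stop at suffixes like 'dev0'
        parts.append(int(m.group()))
    return tuple(parts)

assert version_tuple('0.13.8') == (0, 13, 8)
assert version_tuple('1.0rc1') == (1, 0)
assert version_tuple('1.1.dev0') == (1, 1)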
45a165c5516f3658e45073c0689748dd8b39cadb
|
migrations/versions/8da7405903f6_.py
|
migrations/versions/8da7405903f6_.py
|
"""Add BackgroundTasks settings table.
Revision ID: 8da7405903f6
Revises: 69efa7247067
Create Date: 2020-09-05 20:02:53.642956
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8da7405903f6'
down_revision = '69efa7247067'
branch_labels = None
depends_on = None
def upgrade():
try:
op.create_table(
'background_tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=True),
sa.Column('every', sa.String(length=10), nullable=True),
sa.Column('time', sa.Time(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'))
except Exception:
pass
def downgrade():
op.drop_table('background_tasks')
|
Add BackgroundTasks table migration file.
|
Add BackgroundTasks table migration file.
|
Python
|
mpl-2.0
|
mrf345/FQM,mrf345/FQM,mrf345/FQM,mrf345/FQM
|
Add BackgroundTasks table migration file.
|
"""Add BackgroundTasks settings table.
Revision ID: 8da7405903f6
Revises: 69efa7247067
Create Date: 2020-09-05 20:02:53.642956
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8da7405903f6'
down_revision = '69efa7247067'
branch_labels = None
depends_on = None
def upgrade():
try:
op.create_table(
'background_tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=True),
sa.Column('every', sa.String(length=10), nullable=True),
sa.Column('time', sa.Time(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'))
except Exception:
pass
def downgrade():
op.drop_table('background_tasks')
|
<commit_before><commit_msg>Add BackgroundTasks table migration file.<commit_after>
|
"""Add BackgroundTasks settings table.
Revision ID: 8da7405903f6
Revises: 69efa7247067
Create Date: 2020-09-05 20:02:53.642956
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8da7405903f6'
down_revision = '69efa7247067'
branch_labels = None
depends_on = None
def upgrade():
try:
op.create_table(
'background_tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=True),
sa.Column('every', sa.String(length=10), nullable=True),
sa.Column('time', sa.Time(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'))
except Exception:
pass
def downgrade():
op.drop_table('background_tasks')
|
Add BackgroundTasks table migration file."""Add BackgroundTasks settings table.
Revision ID: 8da7405903f6
Revises: 69efa7247067
Create Date: 2020-09-05 20:02:53.642956
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8da7405903f6'
down_revision = '69efa7247067'
branch_labels = None
depends_on = None
def upgrade():
try:
op.create_table(
'background_tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=True),
sa.Column('every', sa.String(length=10), nullable=True),
sa.Column('time', sa.Time(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'))
except Exception:
pass
def downgrade():
op.drop_table('background_tasks')
|
<commit_before><commit_msg>Add BackgroundTasks table migration file.<commit_after>"""Add BackgroundTasks settings table.
Revision ID: 8da7405903f6
Revises: 69efa7247067
Create Date: 2020-09-05 20:02:53.642956
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8da7405903f6'
down_revision = '69efa7247067'
branch_labels = None
depends_on = None
def upgrade():
try:
op.create_table(
'background_tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=True),
sa.Column('every', sa.String(length=10), nullable=True),
sa.Column('time', sa.Time(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'))
except Exception:
pass
def downgrade():
op.drop_table('background_tasks')
|
|
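The bare except Exception: pass above swallows every failure, not just "table already exists". A sketch of the same idempotence using the SQLAlchemy inspector instead (assumes a SQLAlchemy version where sa.inspect() accepts a connection; column list abbreviated; not the project's code):

import sqlalchemy as sa
from alembic import op

def upgrade():
    bind = op.get_bind()
    if 'background_tasks' not in sa.inspect(bind).get_table_names():
        op.create_table(
            'background_tasks',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('name', sa.String(length=100), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('name'))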
7cfe8666e9146a9065b9942a5bdf16d4f1acd44d
|
utils/graph500-generator/graph500-binary-to-text.py
|
utils/graph500-generator/graph500-binary-to-text.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# © 2014 Mihai Capotă
"""Transform the output of the Graph 500 generator into text."""
from __future__ import division, print_function
import argparse
import struct
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument("fin", help="input file")
argparser.add_argument("fout", help="output file")
args = argparser.parse_args()
with open(args.fin, "rb") as fin, open(args.fout, "w") as fout:
while True:
edge = fin.read(16)
if edge == "":
break
(v1, v2) = struct.unpack("qq", edge)
print(v1, v2, file=fout)
if __name__ == "__main__":
main()
|
Add script to decode Graph 500 generator output
|
Add script to decode Graph 500 generator output
|
Python
|
apache-2.0
|
tudelft-atlarge/graphalytics,tudelft-atlarge/graphalytics,tudelft-atlarge/graphalytics,tudelft-atlarge/graphalytics,ldbc/ldbc_graphalytics,ldbc/ldbc_graphalytics,tudelft-atlarge/graphalytics,ldbc/ldbc_graphalytics,ldbc/ldbc_graphalytics,ldbc/ldbc_graphalytics
|
Add script to decode Graph 500 generator output
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# © 2014 Mihai Capotă
"""Transform the output of the Graph 500 generator into text."""
from __future__ import division, print_function
import argparse
import struct
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument("fin", help="input file")
argparser.add_argument("fout", help="output file")
args = argparser.parse_args()
with open(args.fin, "rb") as fin, open(args.fout, "w") as fout:
while True:
edge = fin.read(16)
if edge == "":
break
(v1, v2) = struct.unpack("qq", edge)
print(v1, v2, file=fout)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to decode Graph 500 generator output<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# © 2014 Mihai Capotă
"""Transform the output of the Graph 500 generator into text."""
from __future__ import division, print_function
import argparse
import struct
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument("fin", help="input file")
argparser.add_argument("fout", help="output file")
args = argparser.parse_args()
with open(args.fin, "rb") as fin, open(args.fout, "w") as fout:
while True:
edge = fin.read(16)
if edge == "":
break
(v1, v2) = struct.unpack("qq", edge)
print(v1, v2, file=fout)
if __name__ == "__main__":
main()
|
Add script to decode Graph 500 generator output#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# © 2014 Mihai Capotă
"""Transform the output of the Graph 500 generator into text."""
from __future__ import division, print_function
import argparse
import struct
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument("fin", help="input file")
argparser.add_argument("fout", help="output file")
args = argparser.parse_args()
with open(args.fin, "rb") as fin, open(args.fout, "w") as fout:
while True:
edge = fin.read(16)
if edge == "":
break
(v1, v2) = struct.unpack("qq", edge)
print(v1, v2, file=fout)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to decode Graph 500 generator output<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# © 2014 Mihai Capotă
"""Transform the output of the Graph 500 generator into text."""
from __future__ import division, print_function
import argparse
import struct
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument("fin", help="input file")
argparser.add_argument("fout", help="output file")
args = argparser.parse_args()
with open(args.fin, "rb") as fin, open(args.fout, "w") as fout:
while True:
edge = fin.read(16)
if edge == "":
break
(v1, v2) = struct.unpack("qq", edge)
print(v1, v2, file=fout)
if __name__ == "__main__":
main()
|
|
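A quick round-trip check for the decoder above; the file name is made up. Note that on Python 3 fin.read(16) returns bytes, so the edge == "" comparison in the script never becomes true — testing the chunk length works on both Python 2 and 3:

import struct

edges = [(0, 1), (1, 2), (2, 0)]
with open("tiny.g500", "wb") as fout:
    for v1, v2 in edges:
        fout.write(struct.pack("qq", v1, v2))

with open("tiny.g500", "rb") as fin:
    while True:
        chunk = fin.read(16)
        if len(chunk) < 16:  # end of file (or a truncated record)
            break
        print(*struct.unpack("qq", chunk))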
dd70fcd512962c5248928a1c9b897fc33249f567
|
judge/utils/views.py
|
judge/utils/views.py
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.template import RequestContext
__author__ = 'Quantum'
def generic_message(request, title, message):
return render_to_response('generic_message.jade', {
'message': message,
'title': title
}, context_instance=RequestContext(request))
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
__author__ = 'Quantum'
def generic_message(request, title, message, status=None):
return render(request, 'generic_message.jade', {
'message': message,
'title': title
}, status=status)
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)
|
Use the render shortcut which defaults to RequestContext and allows passing a status code
|
Use the render shortcut which defaults to RequestContext and allows passing a status code
|
Python
|
agpl-3.0
|
Minkov/site,monouno/site,monouno/site,Phoenix1369/site,DMOJ/site,monouno/site,DMOJ/site,Minkov/site,Phoenix1369/site,DMOJ/site,Minkov/site,Phoenix1369/site,monouno/site,monouno/site,DMOJ/site,Minkov/site,Phoenix1369/site
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.template import RequestContext
__author__ = 'Quantum'
def generic_message(request, title, message):
return render_to_response('generic_message.jade', {
'message': message,
'title': title
}, context_instance=RequestContext(request))
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)Use the render shortcut which defaults to RequestContext and allows passing a status code
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
__author__ = 'Quantum'
def generic_message(request, title, message, status=None):
return render(request, 'generic_message.jade', {
'message': message,
'title': title
}, status=status)
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.template import RequestContext
__author__ = 'Quantum'
def generic_message(request, title, message):
return render_to_response('generic_message.jade', {
'message': message,
'title': title
}, context_instance=RequestContext(request))
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)<commit_msg>Use the render shortcut which defaults to RequestContext and allows passing a status code<commit_after>
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
__author__ = 'Quantum'
def generic_message(request, title, message, status=None):
return render(request, 'generic_message.jade', {
'message': message,
'title': title
}, status=status)
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.template import RequestContext
__author__ = 'Quantum'
def generic_message(request, title, message):
return render_to_response('generic_message.jade', {
'message': message,
'title': title
}, context_instance=RequestContext(request))
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)Use the render shortcut which defaults to RequestContext and allows passing a status codefrom django.contrib.auth.decorators import login_required
from django.shortcuts import render
__author__ = 'Quantum'
def generic_message(request, title, message, status=None):
return render(request, 'generic_message.jade', {
'message': message,
'title': title
}, status=status)
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.template import RequestContext
__author__ = 'Quantum'
def generic_message(request, title, message):
return render_to_response('generic_message.jade', {
'message': message,
'title': title
}, context_instance=RequestContext(request))
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)<commit_msg>Use the render shortcut which defaults to RequestContext and allows passing a status code<commit_after>from django.contrib.auth.decorators import login_required
from django.shortcuts import render
__author__ = 'Quantum'
def generic_message(request, title, message, status=None):
return render(request, 'generic_message.jade', {
'message': message,
'title': title
}, status=status)
class TitleMixin(object):
title = '(untitled)'
def get_context_data(self, **kwargs):
context = super(TitleMixin, self).get_context_data(**kwargs)
context['title'] = self.get_title()
return context
def get_title(self):
return self.title
class LoginRequiredMixin(object):
@classmethod
def as_view(cls, **initkwargs):
view = super(LoginRequiredMixin, cls).as_view(**initkwargs)
return login_required(view)
|
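An illustrative call site for the new status argument; the view and the lookup helper are hypothetical, not part of the module above:

from judge.utils.views import generic_message

def problem_detail(request, code):
    problem = find_problem(code)  # assumed helper for this sketch
    if problem is None:
        return generic_message(request, 'No such problem',
                               'Could not find a problem with code "%s".' % code,
                               status=404)
    ...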
b3790a607a9f48561ce7a3da9242927510974808
|
packs/rackspace/actions/lib/action.py
|
packs/rackspace/actions/lib/action.py
|
from st2actions.runners.pythonrunner import Action
import pyrax
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region']
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
|
import pyrax
from st2actions.runners.pythonrunner import Action
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region'].upper()
print 'xxx', region
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
|
Make sure region name is uppercase.
|
Make sure region name is uppercase.
|
Python
|
apache-2.0
|
jtopjian/st2contrib,tonybaloney/st2contrib,pidah/st2contrib,armab/st2contrib,StackStorm/st2contrib,pinterb/st2contrib,meirwah/st2contrib,psychopenguin/st2contrib,dennybaa/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,tonybaloney/st2contrib,meirwah/st2contrib,pearsontechnology/st2contrib,tonybaloney/st2contrib,jtopjian/st2contrib,armab/st2contrib,digideskio/st2contrib,StackStorm/st2contrib,psychopenguin/st2contrib,StackStorm/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,armab/st2contrib,digideskio/st2contrib,lmEshoo/st2contrib,pinterb/st2contrib,dennybaa/st2contrib
|
from st2actions.runners.pythonrunner import Action
import pyrax
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region']
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
Make sure region name is uppercase.
|
import pyrax
from st2actions.runners.pythonrunner import Action
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region'].upper()
print 'xxx', region
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
|
<commit_before>from st2actions.runners.pythonrunner import Action
import pyrax
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region']
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
<commit_msg>Make sure region name is uppercase.<commit_after>
|
import pyrax
from st2actions.runners.pythonrunner import Action
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region'].upper()
print 'xxx', region
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
|
from st2actions.runners.pythonrunner import Action
import pyrax
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region']
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
Make sure region name is uppercase.import pyrax
from st2actions.runners.pythonrunner import Action
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region'].upper()
print 'xxx', region
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
|
<commit_before>from st2actions.runners.pythonrunner import Action
import pyrax
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region']
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
<commit_msg>Make sure region name is uppercase.<commit_after>import pyrax
from st2actions.runners.pythonrunner import Action
__all__ = [
'PyraxBaseAction'
]
class PyraxBaseAction(Action):
def __init__(self, config):
super(PyraxBaseAction, self).__init__(config)
self.pyrax = self._get_client()
def _get_client(self):
username = self.config['username']
api_key = self.config['api_key']
# Needs to be extracted to per-action
region = self.config['region'].upper()
print 'xxx', region
pyrax.set_setting('identity_type', 'rackspace')
pyrax.set_default_region(region)
pyrax.set_credentials(username, api_key)
return pyrax
|
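Note the leftover debug print in the committed change above. A self-contained sketch of the same normalization without it — the 'DFW' fallback is an assumption for illustration, not something the pack defines:

def normalized_region(config, default='DFW'):
    return config.get('region', default).upper()

assert normalized_region({'region': 'ord'}) == 'ORD'
assert normalized_region({}) == 'DFW'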
dff45c77bfd2dcb6a51fb259e2de1f14c2c7e587
|
euler005.py
|
euler005.py
|
#!/usr/bin/python
from math import pow, sqrt, log, floor
from functools import reduce
LIMIT = 21
"""
Python version of my old first attempt to solve this
"""
factors = [0] * LIMIT
for dividend in range (2, LIMIT):
x = dividend
for i in range (2,x):
factor = 2
while i > 1:
count = 0
while i % factor == 0:
i //= factor
count += 1
if count > factors[factor]:
factors[factor] = count
factor += 1
result = 1
for j in range (2, LIMIT):
result *= pow (j, factors[j])
print (int (result))
"""
Second version, using a better approach
"""
def isPrime (x):
if x % 2 == 0:
return 0
test = range (3, int(sqrt (x)) + 1, 2)
for i in test:
if x % i == 0:
return 0
return 1
result = 1
result *= pow ( 2, floor(log (LIMIT - 1) / log (2)))
for i in range (3, LIMIT, 2):
if isPrime (i):
result *= pow (i, floor (log (LIMIT - 1) / log (i)))
print (int (result))
"""
Beautiful solution, from user EyeCon, found in projecteuler.net
problem 5 forum. This is really interesting and useful for understanding
the possibilities of Python
"""
def gcd(a, b):
while(b != 0):
a, b = b, a%b
return a
def lcm(a,b):
return a * b / gcd(a, b)
print (int(reduce(lcm, range(2, 21))))
|
Add solutions for problem 5
|
Add solutions for problem 5
|
Python
|
mit
|
cifvts/PyEuler
|
Add solutions for problem 5
|
#!/usr/bin/python
from math import pow, sqrt, log, floor
from functools import reduce
LIMIT = 21
"""
Python version of my old first attempt to solve this
"""
factors = [0] * LIMIT
for dividend in range (2, LIMIT):
x = dividend
for i in range (2,x):
factor = 2
while i > 1:
count = 0
while i % factor == 0:
i //= factor
count += 1
if count > factors[factor]:
factors[factor] = count
factor += 1
result = 1
for j in range (2, LIMIT):
result *= pow (j, factors[j])
print (int (result))
"""
Second version, using a better approach
"""
def isPrime (x):
if x % 2 == 0:
return 0
test = range (3, int(sqrt (x)) + 1, 2)
for i in test:
if x % i == 0:
return 0
return 1
result = 1
result *= pow ( 2, floor(log (LIMIT - 1) / log (2)))
for i in range (3, LIMIT, 2):
if isPrime (i):
result *= pow (i, floor (log (LIMIT - 1) / log (i)))
print (int (result))
"""
Beautiful solution, from user EyeCon, found in projecteuler.net
problem 5 forum. This is really interesting and useful for understanding
the possibilities of Python
"""
def gcd(a, b):
while(b != 0):
a, b = b, a%b
return a
def lcm(a,b):
return a * b / gcd(a, b)
print (int(reduce(lcm, range(2, 21))))
|
<commit_before><commit_msg>Add solutions for problem 5<commit_after>
|
#!/usr/bin/python
from math import pow, sqrt, log, floor
from functools import reduce
LIMIT = 21
"""
Python version of my old first attempt to solve this
"""
factors = [0] * LIMIT
for dividend in range (2, LIMIT):
x = dividend
for i in range (2,x):
factor = 2
while i > 1:
count = 0
while i % factor == 0:
i //= factor
count += 1
if count > factors[factor]:
factors[factor] = count
factor += 1
result = 1
for j in range (2, LIMIT):
result *= pow (j, factors[j])
print (int (result))
"""
Second version, using a better approach
"""
def isPrime (x):
if x % 2 == 0:
return 0
test = range (3, int(sqrt (x)) + 1, 2)
for i in test:
if x % i == 0:
return 0
return 1
result = 1
result *= pow ( 2, floor(log (LIMIT - 1) / log (2)))
for i in range (3, LIMIT, 2):
if isPrime (i):
result *= pow (i, floor (log (LIMIT - 1) / log (i)))
print (int (result))
"""
Beautiful solution, from user EyeCon, found in projecteuler.net
problem 5 forum. This is really interesting and useful for understanding
the possibilities of Python
"""
def gcd(a, b):
while(b != 0):
a, b = b, a%b
return a
def lcm(a,b):
return a * b / gcd(a, b)
print (int(reduce(lcm, range(2, 21))))
|
Add solutions for problem 5#!/usr/bin/python
from math import pow, sqrt, log, floor
from functools import reduce
LIMIT = 21
"""
Python version of my old first attempt to solve this
"""
factors = [0] * LIMIT
for dividend in range (2, LIMIT):
x = dividend
for i in range (2,x):
factor = 2
while i > 1:
count = 0
while i % factor == 0:
i //= factor
count += 1
if count > factors[factor]:
factors[factor] = count
factor += 1
result = 1
for j in range (2, LIMIT):
result *= pow (j, factors[j])
print (int (result))
"""
Second version, using a better approach
"""
def isPrime (x):
if x % 2 == 0:
return 0
test = range (3, int(sqrt (x)) + 1, 2)
for i in test:
if x % i == 0:
return 0
return 1
result = 1
result *= pow ( 2, floor(log (LIMIT - 1) / log (2)))
for i in range (3, LIMIT, 2):
if isPrime (i):
result *= pow (i, floor (log (LIMIT - 1) / log (i)))
print (int (result))
"""
Beautiful solution, from user EyeCon, found in projecteuler.net
problem 5 forum. This is really interesting and useful for understanding
the possibilities of Python
"""
def gcd(a, b):
while(b != 0):
a, b = b, a%b
return a
def lcm(a,b):
return a * b / gcd(a, b)
print (int(reduce(lcm, range(2, 21))))
|
<commit_before><commit_msg>Add solutions for problem 5<commit_after>#!/usr/bin/python
from math import pow, sqrt, log, floor
from functools import reduce
LIMIT = 21
"""
Python version of my old first attempt to solve this
"""
factors = [0] * LIMIT
for dividend in range (2, LIMIT):
x = dividend
for i in range (2,x):
factor = 2
while i > 1:
count = 0
while i % factor == 0:
i //= factor
count += 1
if count > factors[factor]:
factors[factor] = count
factor += 1
result = 1
for j in range (2, LIMIT):
result *= pow (j, factors[j])
print (int (result))
"""
Second version, using a better approach
"""
def isPrime (x):
if x % 2 == 0:
return 0
test = range (3, int(sqrt (x)) + 1, 2)
for i in test:
if x % i == 0:
return 0
return 1
result = 1
result *= pow ( 2, floor(log (LIMIT - 1) / log (2)))
for i in range (3, LIMIT, 2):
if isPrime (i):
result *= pow (i, floor (log (LIMIT - 1) / log (i)))
print (int (result))
"""
Beautiful solution, from user EyeCon, found in projecteuler.net
problem 5 forum. This is really interesting and useful for understanding
the possibilities of Python
"""
def gcd(a, b):
while(b != 0):
a, b = b, a%b
return a
def lcm(a,b):
return a * b / gcd(a, b)
print (int(reduce(lcm, range(2, 21))))
|
|
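A side note on the reduce-based lcm approach in the record above: on Python 3.9+ the standard library covers it directly, so the whole computation collapses to one call. A minimal sketch, assuming nothing beyond the standard library:
from math import lcm

# math.lcm (Python 3.9+) accepts any number of arguments, folding the
# reduce step from the solution above into a single call.
print(lcm(*range(2, 21)))  # 232792560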
7f41195e85c468f495beb320bf574e664b17fb12
|
scripts/osfstorage/migrate_metadata.py
|
scripts/osfstorage/migrate_metadata.py
|
# -*- coding: utf-8 -*-
"""Script which ensures that every file version's
content_type, size, and date_modified fields are consistent
with the metadata from waterbutler.
"""
from website.addons.osfstorage.model import OsfStorageFileVersion
def main():
for each in OsfStorageFileVersion.find():
each.update_metadata(each.metadata)
if __name__ == '__main__':
main()
|
Add migration script to ensure metadata fields are set
|
Add migration script to ensure metadata fields are set
|
Python
|
apache-2.0
|
leb2dg/osf.io,MerlinZhang/osf.io,KAsante95/osf.io,GaryKriebel/osf.io,reinaH/osf.io,ticklemepierce/osf.io,pattisdr/osf.io,billyhunt/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,samchrisinger/osf.io,danielneis/osf.io,zamattiac/osf.io,felliott/osf.io,chennan47/osf.io,TomBaxter/osf.io,icereval/osf.io,emetsger/osf.io,Nesiehr/osf.io,kushG/osf.io,GageGaskins/osf.io,zachjanicki/osf.io,doublebits/osf.io,SSJohns/osf.io,jinluyuan/osf.io,chrisseto/osf.io,mluke93/osf.io,wearpants/osf.io,danielneis/osf.io,GageGaskins/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,jmcarp/osf.io,TomHeatwole/osf.io,HalcyonChimera/osf.io,revanthkolli/osf.io,erinspace/osf.io,kwierman/osf.io,caneruguz/osf.io,HarryRybacki/osf.io,cslzchen/osf.io,zkraime/osf.io,asanfilippo7/osf.io,doublebits/osf.io,ticklemepierce/osf.io,jeffreyliu3230/osf.io,sbt9uc/osf.io,bdyetton/prettychart,abought/osf.io,sloria/osf.io,reinaH/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,aaxelb/osf.io,crcresearch/osf.io,Ghalko/osf.io,Nesiehr/osf.io,lyndsysimon/osf.io,amyshi188/osf.io,RomanZWang/osf.io,ckc6cz/osf.io,samanehsan/osf.io,kushG/osf.io,RomanZWang/osf.io,doublebits/osf.io,erinspace/osf.io,TomBaxter/osf.io,crcresearch/osf.io,rdhyee/osf.io,arpitar/osf.io,adlius/osf.io,leb2dg/osf.io,rdhyee/osf.io,haoyuchen1992/osf.io,sbt9uc/osf.io,kushG/osf.io,himanshuo/osf.io,leb2dg/osf.io,petermalcolm/osf.io,mfraezz/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,mattclark/osf.io,cslzchen/osf.io,SSJohns/osf.io,cldershem/osf.io,CenterForOpenScience/osf.io,revanthkolli/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,njantrania/osf.io,brianjgeiger/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,saradbowman/osf.io,kch8qx/osf.io,mluo613/osf.io,bdyetton/prettychart,saradbowman/osf.io,jnayak1/osf.io,wearpants/osf.io,cwisecarver/osf.io,MerlinZhang/osf.io,haoyuchen1992/osf.io,abought/osf.io,felliott/osf.io,zkraime/osf.io,cslzchen/osf.io,sbt9uc/osf.io,binoculars/osf.io,cosenal/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,jinluyuan/osf.io,GaryKriebel/osf.io,cosenal/osf.io,lyndsysimon/osf.io,danielneis/osf.io,mfraezz/osf.io,hmoco/osf.io,billyhunt/osf.io,lamdnhan/osf.io,TomHeatwole/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,himanshuo/osf.io,RomanZWang/osf.io,doublebits/osf.io,laurenrevere/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,jolene-esposito/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,mattclark/osf.io,emetsger/osf.io,RomanZWang/osf.io,barbour-em/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,caseyrygt/osf.io,petermalcolm/osf.io,njantrania/osf.io,alexschiller/osf.io,felliott/osf.io,himanshuo/osf.io,petermalcolm/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,doublebits/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,barbour-em/osf.io,zkraime/osf.io,rdhyee/osf.io,barbour-em/osf.io,sloria/osf.io,ZobairAlijan/osf.io,ZobairAlijan/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,erinspace/osf.io,jinluyuan/osf.io,dplorimer/osf,arpitar/osf.io,barbour-em/osf.io,alexschiller/osf.io,dplorimer/osf,Ghalko/osf.io,amyshi188/osf.io,SSJohns/osf.io,caseyrygt/osf.io,kwierman/osf.io,fabianvf/osf.io,MerlinZhang/osf.io,samchrisinger/osf.io,dplorimer/osf,GageGaskins/osf.io,himanshuo/osf.io,zachjanicki/osf.io,dplorimer/osf,mattclark/osf.io,adlius/osf.io,mluo613/osf.io,mluke93/osf.io,reinaH/osf.io,petermalcolm/osf.io,chennan47/osf.io,kch8qx/osf.io,revanthkolli/osf.io,abought/osf.io,samchrisinger/osf.io,arpitar/osf.io,bdyetton/prettychart,rdhyee/osf.io,alexschiller/osf.io,m
onikagrabowska/osf.io,hmoco/osf.io,ZobairAlijan/osf.io,fabianvf/osf.io,zamattiac/osf.io,zkraime/osf.io,jolene-esposito/osf.io,mluo613/osf.io,samanehsan/osf.io,billyhunt/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,chennan47/osf.io,fabianvf/osf.io,TomHeatwole/osf.io,kch8qx/osf.io,ckc6cz/osf.io,jeffreyliu3230/osf.io,jmcarp/osf.io,cldershem/osf.io,zachjanicki/osf.io,mfraezz/osf.io,acshi/osf.io,cosenal/osf.io,GageGaskins/osf.io,jolene-esposito/osf.io,lyndsysimon/osf.io,jolene-esposito/osf.io,caseyrygt/osf.io,mluke93/osf.io,GaryKriebel/osf.io,lamdnhan/osf.io,kushG/osf.io,jmcarp/osf.io,RomanZWang/osf.io,cslzchen/osf.io,arpitar/osf.io,lamdnhan/osf.io,cldershem/osf.io,ckc6cz/osf.io,hmoco/osf.io,emetsger/osf.io,adlius/osf.io,wearpants/osf.io,HarryRybacki/osf.io,aaxelb/osf.io,mluke93/osf.io,samanehsan/osf.io,icereval/osf.io,aaxelb/osf.io,jnayak1/osf.io,zamattiac/osf.io,caseyrygt/osf.io,cldershem/osf.io,reinaH/osf.io,sloria/osf.io,mfraezz/osf.io,cosenal/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,jinluyuan/osf.io,KAsante95/osf.io,njantrania/osf.io,zamattiac/osf.io,kch8qx/osf.io,abought/osf.io,ckc6cz/osf.io,hmoco/osf.io,lamdnhan/osf.io,mluo613/osf.io,emetsger/osf.io,brandonPurvis/osf.io,jeffreyliu3230/osf.io,caseyrollins/osf.io,jeffreyliu3230/osf.io,GageGaskins/osf.io,acshi/osf.io,amyshi188/osf.io,Nesiehr/osf.io,billyhunt/osf.io,zachjanicki/osf.io,Ghalko/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,icereval/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,cwisecarver/osf.io,binoculars/osf.io,mluo613/osf.io,revanthkolli/osf.io,caneruguz/osf.io,njantrania/osf.io,kwierman/osf.io,haoyuchen1992/osf.io,felliott/osf.io,caneruguz/osf.io,bdyetton/prettychart,adlius/osf.io,KAsante95/osf.io,alexschiller/osf.io,acshi/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,acshi/osf.io,danielneis/osf.io,GaryKriebel/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,haoyuchen1992/osf.io,jmcarp/osf.io,KAsante95/osf.io,HarryRybacki/osf.io,jnayak1/osf.io,DanielSBrown/osf.io,leb2dg/osf.io,fabianvf/osf.io,acshi/osf.io,monikagrabowska/osf.io,wearpants/osf.io,chrisseto/osf.io,lyndsysimon/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,ticklemepierce/osf.io,Ghalko/osf.io,binoculars/osf.io,laurenrevere/osf.io
|
Add migration script to ensure metadata fields are set
|
# -*- coding: utf-8 -*-
"""Script which ensures that every file version's
content_type, size, and date_modified fields are consistent
with the metadata from waterbutler.
"""
from website.addons.osfstorage.model import OsfStorageFileVersion
def main():
for each in OsfStorageFileVersion.find():
each.update_metadata(each.metadata)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add migration script to ensure metadata fields are set<commit_after>
|
# -*- coding: utf-8 -*-
"""Script which ensures that every file version's
content_type, size, and date_modified fields are consistent
with the metadata from waterbutler.
"""
from website.addons.osfstorage.model import OsfStorageFileVersion
def main():
for each in OsfStorageFileVersion.find():
each.update_metadata(each.metadata)
if __name__ == '__main__':
main()
|
Add migration script to ensure metadata fields are set# -*- coding: utf-8 -*-
"""Script which ensures that every file version's
content_type, size, and date_modified fields are consistent
with the metadata from waterbutler.
"""
from website.addons.osfstorage.model import OsfStorageFileVersion
def main():
for each in OsfStorageFileVersion.find():
each.update_metadata(each.metadata)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add migration script to ensure metadata fields are set<commit_after># -*- coding: utf-8 -*-
"""Script which ensures that every file version's
content_type, size, and date_modified fields are consistent
with the metadata from waterbutler.
"""
from website.addons.osfstorage.model import OsfStorageFileVersion
def main():
for each in OsfStorageFileVersion.find():
each.update_metadata(each.metadata)
if __name__ == '__main__':
main()
|
|
625e2335878b711ce16901b1beb378e74747f7ab
|
tools/dev/mklog.py
|
tools/dev/mklog.py
|
#!/usr/bin/env python
#
# Read a diff from stdin, and output a log message template to stdout.
# Hint: It helps if the diff was generated using 'svn diff -x -p'
#
# Note: Don't completely trust the generated log message. This script
# depends on the correct output of 'diff -x -p', which can sometimes get
# confused.
import sys, re
rm = re.compile('@@.*@@ (.*)\(.*$')
def main():
for line in sys.stdin:
if line[0:6] == 'Index:':
print('\n* %s' % line[7:-1])
prev_funcname = ''
continue
match = rm.search(line[:-1])
if match:
if prev_funcname == match.group(1):
continue
print(' (%s):' % match.group(1))
prev_funcname = match.group(1)
if __name__ == '__main__':
main()
|
Add a new script which can potentially be used to generate log message templates in the Subversion-approved format.
|
Add a new script which can potentially be used to generate log message
templates in the Subversion-approved format.
It is currently really stupid; I'm committing it in the hopes that others
find it useful and can improve upon it.
* tools/dev/mklog.py:
New.
|
Python
|
apache-2.0
|
jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion
|
Add a new script which can potentially be used to generate log message
templates in the Subversion-approved format.
It is currently really stupid; I'm committing it in the hopes that others
find it useful and can improve upon it.
* tools/dev/mklog.py:
New.
|
#!/usr/bin/env python
#
# Read a diff from stdin, and output a log message template to stdout.
# Hint: It helps if the diff was generated using 'svn diff -x -p'
#
# Note: Don't completely trust the generated log message. This script
# depends on the correct output of 'diff -x -p', which can sometimes get
# confused.
import sys, re
rm = re.compile('@@.*@@ (.*)\(.*$')
def main():
for line in sys.stdin:
if line[0:6] == 'Index:':
print('\n* %s' % line[7:-1])
prev_funcname = ''
continue
match = rm.search(line[:-1])
if match:
if prev_funcname == match.group(1):
continue
print(' (%s):' % match.group(1))
prev_funcname = match.group(1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a new script which can potentially be used to generate log message
templates in the Subversion-approved format.
It is currently really stupid; I'm committing it in the hopes that others
find it useful and can improve upon it.
* tools/dev/mklog.py:
New.<commit_after>
|
#!/usr/bin/env python
#
# Read a diff from stdin, and output a log message template to stdout.
# Hint: It helps if the diff was generated using 'svn diff -x -p'
#
# Note: Don't completely trust the generated log message. This script
# depends on the correct output of 'diff -x -p', which can sometimes get
# confused.
import sys, re
rm = re.compile('@@.*@@ (.*)\(.*$')
def main():
for line in sys.stdin:
if line[0:6] == 'Index:':
print('\n* %s' % line[7:-1])
prev_funcname = ''
continue
match = rm.search(line[:-1])
if match:
if prev_funcname == match.group(1):
continue
print(' (%s):' % match.group(1))
prev_funcname = match.group(1)
if __name__ == '__main__':
main()
|
Add a new script which can potentially be used to generate log message
templates in the Subversion-approved format.
It is currently really stupid; I'm committing it in the hopes that others
find it useful and can improve upon it.
* tools/dev/mklog.py:
New.#!/usr/bin/env python
#
# Read a diff from stdin, and output a log message template to stdout.
# Hint: It helps if the diff was generated using 'svn diff -x -p'
#
# Note: Don't completely trust the generated log message. This script
# depends on the correct output of 'diff -x -p', which can sometimes get
# confused.
import sys, re
rm = re.compile('@@.*@@ (.*)\(.*$')
def main():
for line in sys.stdin:
if line[0:6] == 'Index:':
print('\n* %s' % line[7:-1])
prev_funcname = ''
continue
match = rm.search(line[:-1])
if match:
if prev_funcname == match.group(1):
continue
print(' (%s):' % match.group(1))
prev_funcname = match.group(1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a new script which can potentially be used to generate log message
templates in the Subversion-approved format.
It is currently really stupid; I'm committing it in the hopes that others
find it useful and can improve upon it.
* tools/dev/mklog.py:
New.<commit_after>#!/usr/bin/env python
#
# Read a diff from stdin, and output a log message template to stdout.
# Hint: It helps if the diff was generated using 'svn diff -x -p'
#
# Note: Don't completely trust the generated log message. This script
# depends on the correct output of 'diff -x -p', which can sometimes get
# confused.
import sys, re
rm = re.compile('@@.*@@ (.*)\(.*$')
def main():
for line in sys.stdin:
if line[0:6] == 'Index:':
print('\n* %s' % line[7:-1])
prev_funcname = ''
continue
match = rm.search(line[:-1])
if match:
if prev_funcname == match.group(1):
continue
print(' (%s):' % match.group(1))
prev_funcname = match.group(1)
if __name__ == '__main__':
main()
|
|
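The heart of mklog.py above is the hunk-header regex; a self-contained check, assuming a representative hunk line from 'svn diff -x -p' output, shows what group(1) captures and what the script would print for it:
import re

# Raw string used here to avoid the invalid-escape warning that the
# plain '\(' in the original pattern triggers on modern Python.
rm = re.compile(r'@@.*@@ (.*)\(.*$')

sample = '@@ -10,6 +10,7 @@ def make_archive(env):'
match = rm.search(sample)
print(' (%s):' % match.group(1))  # ->  (def make_archive):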
16144d2edf80d634182e1ff185dc4f39e467871d
|
st2common/tests/unit/test_util_payload.py
|
st2common/tests/unit/test_util_payload.py
|
# -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.util.payload import PayloadLookup
__all__ = [
'PayloadLookupTestCase'
]
class PayloadLookupTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls.payload = PayloadLookup({
'pikachu': "Has no ears",
'charmander': "Plays with fire",
})
super(PayloadLookupTestCase, cls).setUpClass()
def test_get_key(self):
self.assertEqual(self.payload.get_value('trigger.pikachu'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander'), ["Plays with fire"])
def test_explicitly_get_multiple_keys(self):
self.assertEqual(self.payload.get_value('trigger.pikachu[*]'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander[*]'), ["Plays with fire"])
def test_get_nonexistent_key(self):
self.assertIsNone(self.payload.get_value('trigger.squirtle'))
|
Add tests for new payload module
|
Add tests for new payload module
|
Python
|
apache-2.0
|
nzlosh/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,StackStorm/st2,Plexxi/st2,Plexxi/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2
|
Add tests for new payload module
|
# -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.util.payload import PayloadLookup
__all__ = [
'PayloadLookupTestCase'
]
class PayloadLookupTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls.payload = PayloadLookup({
'pikachu': "Has no ears",
'charmander': "Plays with fire",
})
super(PayloadLookupTestCase, cls).setUpClass()
def test_get_key(self):
self.assertEqual(self.payload.get_value('trigger.pikachu'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander'), ["Plays with fire"])
def test_explicitly_get_multiple_keys(self):
self.assertEqual(self.payload.get_value('trigger.pikachu[*]'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander[*]'), ["Plays with fire"])
def test_get_nonexistent_key(self):
self.assertIsNone(self.payload.get_value('trigger.squirtle'))
|
<commit_before><commit_msg>Add tests for new payload module<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.util.payload import PayloadLookup
__all__ = [
'PayloadLookupTestCase'
]
class PayloadLookupTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls.payload = PayloadLookup({
'pikachu': "Has no ears",
'charmander': "Plays with fire",
})
super(PayloadLookupTestCase, cls).setUpClass()
def test_get_key(self):
self.assertEqual(self.payload.get_value('trigger.pikachu'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander'), ["Plays with fire"])
def test_explicitly_get_multiple_keys(self):
self.assertEqual(self.payload.get_value('trigger.pikachu[*]'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander[*]'), ["Plays with fire"])
def test_get_nonexistent_key(self):
self.assertIsNone(self.payload.get_value('trigger.squirtle'))
|
Add tests for new payload module# -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.util.payload import PayloadLookup
__all__ = [
'PayloadLookupTestCase'
]
class PayloadLookupTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls.payload = PayloadLookup({
'pikachu': "Has no ears",
'charmander': "Plays with fire",
})
super(PayloadLookupTestCase, cls).setUpClass()
def test_get_key(self):
self.assertEqual(self.payload.get_value('trigger.pikachu'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander'), ["Plays with fire"])
def test_explicitly_get_multiple_keys(self):
self.assertEqual(self.payload.get_value('trigger.pikachu[*]'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander[*]'), ["Plays with fire"])
def test_get_nonexistent_key(self):
self.assertIsNone(self.payload.get_value('trigger.squirtle'))
|
<commit_before><commit_msg>Add tests for new payload module<commit_after># -*- coding: utf-8 -*-
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.util.payload import PayloadLookup
__all__ = [
'PayloadLookupTestCase'
]
class PayloadLookupTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls.payload = PayloadLookup({
'pikachu': "Has no ears",
'charmander': "Plays with fire",
})
super(PayloadLookupTestCase, cls).setUpClass()
def test_get_key(self):
self.assertEqual(self.payload.get_value('trigger.pikachu'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander'), ["Plays with fire"])
def test_explicitly_get_multiple_keys(self):
self.assertEqual(self.payload.get_value('trigger.pikachu[*]'), ["Has no ears"])
self.assertEqual(self.payload.get_value('trigger.charmander[*]'), ["Plays with fire"])
def test_get_nonexistent_key(self):
self.assertIsNone(self.payload.get_value('trigger.squirtle'))
|
|
2038e57a88ceebf35ec8196c12d0e075bdf98003
|
buedafab/deploy/utils.py
|
buedafab/deploy/utils.py
|
"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.package_installation_scripts:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
|
"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.extra_deploy_tasks:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
|
Fix variable we're looking for.
|
Fix variable we're looking for.
|
Python
|
mit
|
bueda/ops,alexmerser/ops,madteckhead/ops,spbrien/bueda-ops
|
"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.package_installation_scripts:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
Fix variable we're looking for.
|
"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.extra_deploy_tasks:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
|
<commit_before>"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.package_installation_scripts:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
<commit_msg>Fix variable we're looking for.<commit_after>
|
"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.extra_deploy_tasks:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
|
"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.package_installation_scripts:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
Fix variable we're looking for."""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.extra_deploy_tasks:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
|
<commit_before>"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.package_installation_scripts:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
<commit_msg>Fix variable we're looking for.<commit_after>"""General deployment utilities (not Fabric commands)."""
from fabric.api import cd, require, local, env
from buedafab import deploy
def make_archive():
"""Create a compressed archive of the project's repository, complete with
submodules.
TODO We used to use git-archive-all to archive the submodules as well,
since 'git archive' doesn't touch them. We reverted back at some point and
stopped using archives in our deployment strategy, so this may not work with
submodules.
"""
require('release')
require('scratch_path')
with cd(env.scratch_path):
deploy.release.make_pretty_release()
local('git checkout %(release)s' % env)
local('git submodule update --init')
local('git archive --prefix=%(unit)s/ --format tar '
'%(release)s | gzip > %(scratch_path)s/%(archive)s' % env)
def run_extra_deploy_tasks(deployed=False):
"""Run arbitrary functions listed in env.package_installation_scripts.
Each function must accept a single parameter (or just kwargs) that will
indicate if the app was deployed or already existed.
"""
require('release_path')
if not env.extra_deploy_tasks:
return
with cd(env.release_path):
for task in env.extra_deploy_tasks:
task(deployed)
|
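For context on run_extra_deploy_tasks above, each entry in env.extra_deploy_tasks is a callable taking one positional argument. A hypothetical task conforming to that contract (the name and body here are illustrative, not part of buedafab):
def warm_caches(deployed=False):
    # Hypothetical extra deploy task: do work only on a fresh deploy.
    if deployed:
        print('Priming caches for the new release...')

# In a project fabfile one might register it like:
# env.extra_deploy_tasks = [warm_caches]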
7031126f3839a389b16d48404e5ad085d815fe83
|
py/solve-the-equation.py
|
py/solve-the-equation.py
|
import re
class Solution(object):
def solveEquation(self, equation):
"""
:type equation: str
:rtype: str
"""
lhs, rhs = equation.split('=')
lcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', lhs))
rcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', rhs))
lconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', lhs))
rconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', rhs))
print lcoef, rcoef, lconst, rconst
if lcoef == rcoef:
if lconst == rconst:
return "Infinite solutions"
else:
return "No solution"
else:
return 'x={ans}'.format(ans=(lconst - rconst) / (rcoef - lcoef))
|
Add py solution for 640. Solve the Equation
|
Add py solution for 640. Solve the Equation
640. Solve the Equation: https://leetcode.com/problems/solve-the-equation/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 640. Solve the Equation
640. Solve the Equation: https://leetcode.com/problems/solve-the-equation/
|
import re
class Solution(object):
def solveEquation(self, equation):
"""
:type equation: str
:rtype: str
"""
lhs, rhs = equation.split('=')
lcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', lhs))
rcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', rhs))
lconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', lhs))
rconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', rhs))
print lcoef, rcoef, lconst, rconst
if lcoef == rcoef:
if lconst == rconst:
return "Infinite solutions"
else:
return "No solution"
else:
return 'x={ans}'.format(ans=(lconst - rconst) / (rcoef - lcoef))
|
<commit_before><commit_msg>Add py solution for 640. Solve the Equation
640. Solve the Equation: https://leetcode.com/problems/solve-the-equation/<commit_after>
|
import re
class Solution(object):
def solveEquation(self, equation):
"""
:type equation: str
:rtype: str
"""
lhs, rhs = equation.split('=')
lcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', lhs))
rcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', rhs))
lconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', lhs))
rconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', rhs))
print lcoef, rcoef, lconst, rconst
if lcoef == rcoef:
if lconst == rconst:
return "Infinite solutions"
else:
return "No solution"
else:
return 'x={ans}'.format(ans=(lconst - rconst) / (rcoef - lcoef))
|
Add py solution for 640. Solve the Equation
640. Solve the Equation: https://leetcode.com/problems/solve-the-equation/import re
class Solution(object):
def solveEquation(self, equation):
"""
:type equation: str
:rtype: str
"""
lhs, rhs = equation.split('=')
lcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', lhs))
rcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', rhs))
lconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', lhs))
rconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', rhs))
print lcoef, rcoef, lconst, rconst
if lcoef == rcoef:
if lconst == rconst:
return "Infinite solutions"
else:
return "No solution"
else:
return 'x={ans}'.format(ans=(lconst - rconst) / (rcoef - lcoef))
|
<commit_before><commit_msg>Add py solution for 640. Solve the Equation
640. Solve the Equation: https://leetcode.com/problems/solve-the-equation/<commit_after>import re
class Solution(object):
def solveEquation(self, equation):
"""
:type equation: str
:rtype: str
"""
lhs, rhs = equation.split('=')
lcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', lhs))
rcoef = sum(int(m + "1" if m in ["-", "+", ""] else m) for m in re.findall(r'([+-]?\d*)x', rhs))
lconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', lhs))
rconst = sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', rhs))
print lcoef, rcoef, lconst, rconst
if lcoef == rcoef:
if lconst == rconst:
return "Infinite solutions"
else:
return "No solution"
else:
return 'x={ans}'.format(ans=(lconst - rconst) / (rcoef - lcoef))
|
|
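The coefficient/constant split above is the whole algorithm; the same regexes can be exercised standalone. A Python 3 sketch of just the parsing step (the record's code itself is Python 2, note the bare print; the sample equation is the classic one from the problem statement):
import re

def coef(side):
    # Sum x-coefficients: bare 'x', '+x', '-x' count as +1/-1.
    return sum(int(m + '1' if m in ('-', '+', '') else m)
               for m in re.findall(r'([+-]?\d*)x', side))

def const(side):
    # Sum standalone constants (digits not followed by more digits or 'x').
    return sum(int(m) for m in re.findall(r'([+-]?\d+)(?![0-9x])', side))

lhs, rhs = 'x+5-3+x=6+x-2'.split('=')
print('x={}'.format((const(lhs) - const(rhs)) // (coef(rhs) - coef(lhs))))  # x=2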
02eaba1a420edddf72ed27a302dfb1314f9501e5
|
tests/test_config.py
|
tests/test_config.py
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import angus.client
__updated__ = "2017-08-08"
__author__ = "Aurélien Moreau"
__copyright__ = "Copyright 2015-2017, Angus.ai"
__credits__ = ["Aurélien Moreau", "Gwennael Gate"]
__license__ = "Apache v2.0"
__maintainer__ = "Aurélien Moreau"
__status__ = "Production"
def test_connect():
angus.client.connect()
|
Add very simple test for configuration process
|
Add very simple test for configuration process
|
Python
|
apache-2.0
|
angus-ai/angus-sdk-python
|
Add very simple test for configuration process
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import angus.client
__updated__ = "2017-08-08"
__author__ = "Aurélien Moreau"
__copyright__ = "Copyright 2015-2017, Angus.ai"
__credits__ = ["Aurélien Moreau", "Gwennael Gate"]
__license__ = "Apache v2.0"
__maintainer__ = "Aurélien Moreau"
__status__ = "Production"
def test_connect():
angus.client.connect()
|
<commit_before><commit_msg>Add very simple test for configuration process<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import angus.client
__updated__ = "2017-08-08"
__author__ = "Aurélien Moreau"
__copyright__ = "Copyright 2015-2017, Angus.ai"
__credits__ = ["Aurélien Moreau", "Gwennael Gate"]
__license__ = "Apache v2.0"
__maintainer__ = "Aurélien Moreau"
__status__ = "Production"
def test_connect():
angus.client.connect()
|
Add very simple test for configuration process# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import angus.client
__updated__ = "2017-08-08"
__author__ = "Aurélien Moreau"
__copyright__ = "Copyright 2015-2017, Angus.ai"
__credits__ = ["Aurélien Moreau", "Gwennael Gate"]
__license__ = "Apache v2.0"
__maintainer__ = "Aurélien Moreau"
__status__ = "Production"
def test_connect():
angus.client.connect()
|
<commit_before><commit_msg>Add very simple test for configuration process<commit_after># -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import angus.client
__updated__ = "2017-08-08"
__author__ = "Aurélien Moreau"
__copyright__ = "Copyright 2015-2017, Angus.ai"
__credits__ = ["Aurélien Moreau", "Gwennael Gate"]
__license__ = "Apache v2.0"
__maintainer__ = "Aurélien Moreau"
__status__ = "Production"
def test_connect():
angus.client.connect()
|
|
c477427a35a0edaf90f53cf6b9e4cf33d5a4f0cc
|
tests/test_remote.py
|
tests/test_remote.py
|
# Empty test file just for coverage / syntax-checking purposes
# flake8: noqa
import nbdiff.server.remote_server
import nbdiff.server.database
import nbdiff.server.command.AboutUsCommand
import nbdiff.server.command.ComparisonCommand
import nbdiff.server.command.ContactUsCommand
import nbdiff.server.command.DiffCommand
import nbdiff.server.command.DiffURLCommand
import nbdiff.server.command.FaqCommand
import nbdiff.server.command.MergeCommand
import nbdiff.server.command.MergeURLCommand
import nbdiff.server.command.NotebookRequestCommand
import nbdiff.server.command.ResourceRequestCommand
import nbdiff.server.command.SaveNotebookCommand
import nbdiff.server.command.UploadCommand
|
Add empty test file that imports all remote command files for syntax-checking/coverage reporting.
|
Add empty test file that imports all remote command files for syntax-checking/coverage reporting.
|
Python
|
mit
|
tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff,tarmstrong/nbdiff
|
Add empty test file that imports all remote command files for syntax-checking/coverage reporting.
|
# Empty test file just for coverage / syntax-checking purposes
# flake8: noqa
import nbdiff.server.remote_server
import nbdiff.server.database
import nbdiff.server.command.AboutUsCommand
import nbdiff.server.command.ComparisonCommand
import nbdiff.server.command.ContactUsCommand
import nbdiff.server.command.DiffCommand
import nbdiff.server.command.DiffURLCommand
import nbdiff.server.command.FaqCommand
import nbdiff.server.command.MergeCommand
import nbdiff.server.command.MergeURLCommand
import nbdiff.server.command.NotebookRequestCommand
import nbdiff.server.command.ResourceRequestCommand
import nbdiff.server.command.SaveNotebookCommand
import nbdiff.server.command.UploadCommand
|
<commit_before><commit_msg>Add empty test file that imports all remote command files for syntax-checking/coverage reporting.<commit_after>
|
# Empty test file just for coverage / syntax-checking purposes
# flake8: noqa
import nbdiff.server.remote_server
import nbdiff.server.database
import nbdiff.server.command.AboutUsCommand
import nbdiff.server.command.ComparisonCommand
import nbdiff.server.command.ContactUsCommand
import nbdiff.server.command.DiffCommand
import nbdiff.server.command.DiffURLCommand
import nbdiff.server.command.FaqCommand
import nbdiff.server.command.MergeCommand
import nbdiff.server.command.MergeURLCommand
import nbdiff.server.command.NotebookRequestCommand
import nbdiff.server.command.ResourceRequestCommand
import nbdiff.server.command.SaveNotebookCommand
import nbdiff.server.command.UploadCommand
|
Add empty test file that imports all remote command files for syntax-checking/coverage reporting.# Empty test file just for coverage / syntax-checking purposes
# flake8: noqa
import nbdiff.server.remote_server
import nbdiff.server.database
import nbdiff.server.command.AboutUsCommand
import nbdiff.server.command.ComparisonCommand
import nbdiff.server.command.ContactUsCommand
import nbdiff.server.command.DiffCommand
import nbdiff.server.command.DiffURLCommand
import nbdiff.server.command.FaqCommand
import nbdiff.server.command.MergeCommand
import nbdiff.server.command.MergeURLCommand
import nbdiff.server.command.NotebookRequestCommand
import nbdiff.server.command.ResourceRequestCommand
import nbdiff.server.command.SaveNotebookCommand
import nbdiff.server.command.UploadCommand
|
<commit_before><commit_msg>Add empty test file that imports all remote command files for syntax-checking/coverage reporting.<commit_after># Empty test file just for coverage / syntax-checking purposes
# flake8: noqa
import nbdiff.server.remote_server
import nbdiff.server.database
import nbdiff.server.command.AboutUsCommand
import nbdiff.server.command.ComparisonCommand
import nbdiff.server.command.ContactUsCommand
import nbdiff.server.command.DiffCommand
import nbdiff.server.command.DiffURLCommand
import nbdiff.server.command.FaqCommand
import nbdiff.server.command.MergeCommand
import nbdiff.server.command.MergeURLCommand
import nbdiff.server.command.NotebookRequestCommand
import nbdiff.server.command.ResourceRequestCommand
import nbdiff.server.command.SaveNotebookCommand
import nbdiff.server.command.UploadCommand
|
|
bbb156a9abce7c9210a21f3c4875833261ff7407
|
scripts/set_alpha.py
|
scripts/set_alpha.py
|
#!/usr/bin/env python
import sys
alpha_deg = sys.argv[1]
with open("system/fvOptions", "w") as f:
with open("system/fvOptions.template") as template:
txt = template.read()
f.write(txt.format(alpha_deg=alpha_deg))
|
Add script to set angle of attack
|
Add script to set angle of attack
|
Python
|
mit
|
petebachant/actuatorLine-2D-turbinesFoam,petebachant/actuatorLine-2D-turbinesFoam,petebachant/actuatorLine-2D-turbinesFoam
|
Add script to set angle of attack
|
#!/usr/bin/env python
import sys
alpha_deg = sys.argv[1]
with open("system/fvOptions", "w") as f:
with open("system/fvOptions.template") as template:
txt = template.read()
f.write(txt.format(alpha_deg=alpha_deg))
|
<commit_before><commit_msg>Add script to set angle of attack<commit_after>
|
#!/usr/bin/env python
import sys
alpha_deg = sys.argv[1]
with open("system/fvOptions", "w") as f:
with open("system/fvOptions.template") as template:
txt = template.read()
f.write(txt.format(alpha_deg=alpha_deg))
|
Add script to set angle of attack#!/usr/bin/env python
import sys
alpha_deg = sys.argv[1]
with open("system/fvOptions", "w") as f:
with open("system/fvOptions.template") as template:
txt = template.read()
f.write(txt.format(alpha_deg=alpha_deg))
|
<commit_before><commit_msg>Add script to set angle of attack<commit_after>#!/usr/bin/env python
import sys
alpha_deg = sys.argv[1]
with open("system/fvOptions", "w") as f:
with open("system/fvOptions.template") as template:
txt = template.read()
f.write(txt.format(alpha_deg=alpha_deg))
|
|
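set_alpha.py above is a thin wrapper around str.format; the same pattern in self-contained form (the template file and placeholder here are stand-ins, not the real OpenFOAM system/fvOptions.template):
# Write a tiny stand-in template with one {alpha_deg} placeholder.
with open('demo.template', 'w') as f:
    f.write('angleOfAttack   {alpha_deg};\n')

alpha_deg = '8'  # the real script takes this from sys.argv[1]
with open('demo.template') as template:
    print(template.read().format(alpha_deg=alpha_deg))  # angleOfAttack   8;
One caveat with this approach: str.format treats every brace as a placeholder, so any literal { } in the template (common in OpenFOAM dictionaries) would have to be doubled as {{ }}.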
a4341da7e35b95907436dfa557139dccbb03d962
|
examples/red_green.py
|
examples/red_green.py
|
# PS Move API
# Copyright (c) 2011 Thomas Perl <thp.io/about>
# All Rights Reserved
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'build'))
import time
import math
import psmove
move = psmove.PSMove()
i = 0
while True:
r = int(128+128*math.sin(i))
move.set_leds(r, 255-r, 0)
move.update_leds()
time.sleep(.1)
i += .2
|
Add a simple Python example
|
Add a simple Python example
|
Python
|
bsd-2-clause
|
fourks/moveonpc,Hazer/moveonpc,seanjensengrey/moveonpc,cosmo1911/moveonpc,bab178/moveonpc,Zer01neDev/moveonpc,merryLee/moveonpc,Zer01neDev/moveonpc,fourks/moveonpc,bab178/moveonpc,merryLee/moveonpc,cosmo1911/moveonpc,seanjensengrey/moveonpc,cosmo1911/moveonpc,fourks/moveonpc,merryLee/moveonpc,seanjensengrey/moveonpc,seanjensengrey/moveonpc,Zer01neDev/moveonpc,cosmo1911/moveonpc,merryLee/moveonpc,seanjensengrey/moveonpc,bab178/moveonpc,Hazer/moveonpc,Hazer/moveonpc,Hazer/moveonpc,Hazer/moveonpc,cosmo1911/moveonpc,cosmo1911/moveonpc,bab178/moveonpc,bab178/moveonpc,Zer01neDev/moveonpc,Zer01neDev/moveonpc,merryLee/moveonpc,seanjensengrey/moveonpc,cosmo1911/moveonpc,merryLee/moveonpc,fourks/moveonpc,bab178/moveonpc,Zer01neDev/moveonpc,merryLee/moveonpc,Zer01neDev/moveonpc,fourks/moveonpc,fourks/moveonpc,fourks/moveonpc,Hazer/moveonpc,Hazer/moveonpc,seanjensengrey/moveonpc,bab178/moveonpc
|
Add a simple Python example
|
# PS Move API
# Copyright (c) 2011 Thomas Perl <thp.io/about>
# All Rights Reserved
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'build'))
import time
import math
import psmove
move = psmove.PSMove()
i = 0
while True:
r = int(128+128*math.sin(i))
move.set_leds(r, 255-r, 0)
move.update_leds()
time.sleep(.1)
i += .2
|
<commit_before><commit_msg>Add a simple Python example<commit_after>
|
# PS Move API
# Copyright (c) 2011 Thomas Perl <thp.io/about>
# All Rights Reserved
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'build'))
import time
import math
import psmove
move = psmove.PSMove()
i = 0
while True:
r = int(128+128*math.sin(i))
move.set_leds(r, 255-r, 0)
move.update_leds()
time.sleep(.1)
i += .2
|
Add a simple Python example
# PS Move API
# Copyright (c) 2011 Thomas Perl <thp.io/about>
# All Rights Reserved
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'build'))
import time
import math
import psmove
move = psmove.PSMove()
i = 0
while True:
r = int(128+128*math.sin(i))
move.set_leds(r, 255-r, 0)
move.update_leds()
time.sleep(.1)
i += .2
|
<commit_before><commit_msg>Add a simple Python example<commit_after>
# PS Move API
# Copyright (c) 2011 Thomas Perl <thp.io/about>
# All Rights Reserved
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'build'))
import time
import math
import psmove
move = psmove.PSMove()
i = 0
while True:
r = int(128+128*math.sin(i))
move.set_leds(r, 255-r, 0)
move.update_leds()
time.sleep(.1)
i += .2
|
|
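The fade in red_green.py is driven entirely by the sine term; the sweep can be previewed without a controller (a sketch assuming no psmove hardware, printing the first few LED values):
import math

for step in range(10):
    i = step * .2
    r = int(128 + 128 * math.sin(i))    # red ramps up and down with sin(i)
    print('red={:3d} green={:3d}'.format(r, 255 - r))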
e57e210f7e475b6ab15a6c874c4a3a763a483b99
|
_cookbook/_division.py
|
_cookbook/_division.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''@package _cookbook
Number Division (True or floor division).
Copyright (c) 2014 Li Yun <leven.cn@gmail.com>
All Rights Reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
from __future__ import division
assert 1 / 2 == 0.5  # true division (would be 0 under Python 2's default floor division)
assert 1 // 2 == 0 # floor division
|
Create cookbook of Number Division in Python
|
Create cookbook of Number Division in Python
|
Python
|
apache-2.0
|
leven-cn/admin-linux,leven-cn/admin-linux
|
Create cookbook of Number Division in Python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''@package _cookbook
Number Division (True or floor division).
Copyright (c) 2014 Li Yun <leven.cn@gmail.com>
All Rights Reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
from __future__ import division
assert 1 / 2 == 0.5  # true division (would be 0 under Python 2's default floor division)
assert 1 // 2 == 0 # floor division
|
<commit_before><commit_msg>Create cookbook of Number Division in Python<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''@package _cookbook
Number Division (True or floor division).
Copyright (c) 2014 Li Yun <leven.cn@gmail.com>
All Rights Reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
from __future__ import division
assert 1 / 2 == 0.5  # true division; Python 2 floor-divides by default, giving 0
assert 1 // 2 == 0 # floor division
|
Create cookbook of Number Division in Python#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''@package _cookbook
Number Division (True or floor division).
Copyright (c) 2014 Li Yun <leven.cn@gmail.com>
All Rights Reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
from __future__ import division
assert 1 / 2 == 0.5  # true division; Python 2 floor-divides by default, giving 0
assert 1 // 2 == 0 # floor division
|
<commit_before><commit_msg>Create cookbook of Number Division in Python<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''@package _cookbook
Number Division (True or floor division).
Copyright (c) 2014 Li Yun <leven.cn@gmail.com>
All Rights Reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
from __future__ import division
assert 1 / 2 == 0.5  # true division; Python 2 floor-divides by default, giving 0
assert 1 // 2 == 0 # floor division
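
One case the cookbook's two asserts do not cover is negative operands: floor division rounds toward negative infinity rather than toward zero. A small extension in the same assert style, offered as a sketch rather than part of the original recipe:

from __future__ import division

assert -1 // 2 == -1              # rounds toward negative infinity, not toward 0
assert -1 / 2 == -0.5             # true division is unaffected by sign
assert divmod(-1, 2) == (-1, 1)   # quotient and remainder stay consistent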
|
|
b000fc19657b80c46ca9c2d7e6dfdaa16e4d400f
|
scripts/slave/apply_svn_patch.py
|
scripts/slave/apply_svn_patch.py
|
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import subprocess
import sys
def main():
parser = optparse.OptionParser()
parser.add_option('-p', '--patch-url',
help='The SVN URL to download the patch from.')
parser.add_option('-r', '--root-dir',
help='The root dir in which to apply patch.')
options, args = parser.parse_args()
if args:
parser.error('Unused args: %s' % args)
if not (options.patch_url and options.root_dir):
parser.error('A patch URL and root directory should be specified.')
svn_cat = subprocess.Popen(['svn', 'cat', options.patch_url],
stdout=subprocess.PIPE)
patch = subprocess.Popen(['patch', '-t', '-p', '0', '-d', options.root_dir],
stdin=svn_cat.stdout)
  patch.communicate()
  return patch.returncode or None
if __name__ == '__main__':
sys.exit(main())
|
Add a script which can apply a try job SVN patch via an annotated step.
|
Add a script which can apply a try job SVN patch via an annotated step.
Review URL: https://chromiumcodereview.appspot.com/24688002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@225287 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
Add a script which can apply a try job SVN patch via an annotated step.
Review URL: https://chromiumcodereview.appspot.com/24688002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@225287 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import subprocess
import sys
def main():
parser = optparse.OptionParser()
parser.add_option('-p', '--patch-url',
help='The SVN URL to download the patch from.')
parser.add_option('-r', '--root-dir',
help='The root dir in which to apply patch.')
options, args = parser.parse_args()
if args:
parser.error('Unused args: %s' % args)
if not (options.patch_url and options.root_dir):
parser.error('A patch URL and root directory should be specified.')
svn_cat = subprocess.Popen(['svn', 'cat', options.patch_url],
stdout=subprocess.PIPE)
patch = subprocess.Popen(['patch', '-t', '-p', '0', '-d', options.root_dir],
stdin=svn_cat.stdout)
  patch.communicate()
  return patch.returncode or None
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add a script which can apply a try job SVN patch via an annotated step.
Review URL: https://chromiumcodereview.appspot.com/24688002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@225287 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import subprocess
import sys
def main():
parser = optparse.OptionParser()
parser.add_option('-p', '--patch-url',
help='The SVN URL to download the patch from.')
parser.add_option('-r', '--root-dir',
help='The root dir in which to apply patch.')
options, args = parser.parse_args()
if args:
parser.error('Unused args: %s' % args)
if not (options.patch_url and options.root_dir):
parser.error('A patch URL and root directory should be specified.')
svn_cat = subprocess.Popen(['svn', 'cat', options.patch_url],
stdout=subprocess.PIPE)
patch = subprocess.Popen(['patch', '-t', '-p', '0', '-d', options.root_dir],
stdin=svn_cat.stdout)
  patch.communicate()
  return patch.returncode or None
if __name__ == '__main__':
sys.exit(main())
|
Add a script which can apply a try job SVN patch via an annotated step.
Review URL: https://chromiumcodereview.appspot.com/24688002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@225287 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import subprocess
import sys
def main():
parser = optparse.OptionParser()
parser.add_option('-p', '--patch-url',
help='The SVN URL to download the patch from.')
parser.add_option('-r', '--root-dir',
help='The root dir in which to apply patch.')
options, args = parser.parse_args()
if args:
parser.error('Unused args: %s' % args)
if not (options.patch_url and options.root_dir):
parser.error('A patch URL and root directory should be specified.')
svn_cat = subprocess.Popen(['svn', 'cat', options.patch_url],
stdout=subprocess.PIPE)
patch = subprocess.Popen(['patch', '-t', '-p', '0', '-d', options.root_dir],
stdin=svn_cat.stdout)
  patch.communicate()
  return patch.returncode or None
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add a script which can apply a try job SVN patch via an annotated step.
Review URL: https://chromiumcodereview.appspot.com/24688002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@225287 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import subprocess
import sys
def main():
parser = optparse.OptionParser()
parser.add_option('-p', '--patch-url',
help='The SVN URL to download the patch from.')
parser.add_option('-r', '--root-dir',
help='The root dir in which to apply patch.')
options, args = parser.parse_args()
if args:
parser.error('Unused args: %s' % args)
if not (options.patch_url and options.root_dir):
parser.error('A patch URL and root directory should be specified.')
svn_cat = subprocess.Popen(['svn', 'cat', options.patch_url],
stdout=subprocess.PIPE)
patch = subprocess.Popen(['patch', '-t', '-p', '0', '-d', options.root_dir],
stdin=svn_cat.stdout)
  patch.communicate()
  return patch.returncode or None
if __name__ == '__main__':
sys.exit(main())
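
For context, a hypothetical invocation of the script above; the patch URL and checkout path are placeholders, not values taken from the original change:

# Hypothetical usage; the URL and directory below are illustrative only.
import subprocess

subprocess.check_call([
    'python', 'apply_svn_patch.py',
    '--patch-url', 'svn://svn.example.com/try/some_patch.diff',
    '--root-dir', '/path/to/checkout',
])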
|
|
257531907b7bcca1b89172d8c50d4a35b3f25117
|
docs/examples/ccsd.py
|
docs/examples/ccsd.py
|
"""Automatic derivation of CCSD equations.
"""
import urllib.request
from pyspark import SparkConf, SparkContext
from sympy import IndexedBase, Rational
from drudge import PartHoleDrudge, CR, AN
conf = SparkConf().setAppName('CCSD-derivation')
ctx = SparkContext(conf=conf)
dr = PartHoleDrudge(ctx)
p = dr.names
c_ = dr.op[AN]
c_dag = dr.op[CR]
a, b = p.V_dumms[:2]
i, j = p.O_dumms[:2]
t1 = IndexedBase('t1')
t2 = IndexedBase('t2')
dr.set_dbbar_base(t2, 2)
singles = dr.sum(
(a, p.V), (i, p.O), t1[a, i] * c_dag[a] * c_[i]
)
doubles = dr.sum(
(a, p.V), (b, p.V), (i, p.O), (j, p.O),
Rational(1, 4) * t2[a, b, i, j] * c_dag[a] * c_dag[b] * c_[j] * c_[i]
)
clusters = singles + doubles
curr = dr.ham
h_bar = dr.ham
for order in range(0, 4):
curr = (curr | clusters).simplify() * Rational(1, order + 1)
h_bar += curr
en_eqn = dr.eval_fermi_vev(h_bar).simplify()
proj = c_dag[i] * c_[a]
t1_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
proj = c_dag[i] * c_dag[j] * c_[b] * c_[a]
t2_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
# Check with the result from TCE.
TCE_BASE_URL = 'http://www.scs.illinois.edu/~sohirata/'
tce_res = [
dr.parse_tce(
urllib.request.urlopen(TCE_BASE_URL + i).read().decode(),
{1: t1, 2: t2}
).simplify()
for i in ['ccsd_e.out', 'ccsd_t1.out', 'ccsd_t2.out']
]
print('Checking with TCE result: ')
print('Energy: ', en_eqn == tce_res[0])
print('T1 amplitude: ', t1_eqn == tce_res[1])
print('T2 amplitude: ', t2_eqn == tce_res[2])
|
Add example for CCSD theory
|
Add example for CCSD theory
When run serially, it takes about five minutes on Macbook Pro 2015.
This is a big success.
|
Python
|
mit
|
tschijnmo/drudge,tschijnmo/drudge,tschijnmo/drudge
|
Add example for CCSD theory
When run serially, it takes about five minutes on Macbook Pro 2015.
This is a big success.
|
"""Automatic derivation of CCSD equations.
"""
import urllib.request
from pyspark import SparkConf, SparkContext
from sympy import IndexedBase, Rational
from drudge import PartHoleDrudge, CR, AN
conf = SparkConf().setAppName('CCSD-derivation')
ctx = SparkContext(conf=conf)
dr = PartHoleDrudge(ctx)
p = dr.names
c_ = dr.op[AN]
c_dag = dr.op[CR]
a, b = p.V_dumms[:2]
i, j = p.O_dumms[:2]
t1 = IndexedBase('t1')
t2 = IndexedBase('t2')
dr.set_dbbar_base(t2, 2)
singles = dr.sum(
(a, p.V), (i, p.O), t1[a, i] * c_dag[a] * c_[i]
)
doubles = dr.sum(
(a, p.V), (b, p.V), (i, p.O), (j, p.O),
Rational(1, 4) * t2[a, b, i, j] * c_dag[a] * c_dag[b] * c_[j] * c_[i]
)
clusters = singles + doubles
curr = dr.ham
h_bar = dr.ham
for order in range(0, 4):
curr = (curr | clusters).simplify() * Rational(1, order + 1)
h_bar += curr
en_eqn = dr.eval_fermi_vev(h_bar).simplify()
proj = c_dag[i] * c_[a]
t1_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
proj = c_dag[i] * c_dag[j] * c_[b] * c_[a]
t2_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
# Check with the result from TCE.
TCE_BASE_URL = 'http://www.scs.illinois.edu/~sohirata/'
tce_res = [
dr.parse_tce(
urllib.request.urlopen(TCE_BASE_URL + i).read().decode(),
{1: t1, 2: t2}
).simplify()
for i in ['ccsd_e.out', 'ccsd_t1.out', 'ccsd_t2.out']
]
print('Checking with TCE result: ')
print('Energy: ', en_eqn == tce_res[0])
print('T1 amplitude: ', t1_eqn == tce_res[1])
print('T2 amplitude: ', t2_eqn == tce_res[2])
|
<commit_before><commit_msg>Add example for CCSD theory
When run serially, it takes about five minutes on Macbook Pro 2015.
This is a big success.<commit_after>
|
"""Automatic derivation of CCSD equations.
"""
import urllib.request
from pyspark import SparkConf, SparkContext
from sympy import IndexedBase, Rational
from drudge import PartHoleDrudge, CR, AN
conf = SparkConf().setAppName('CCSD-derivation')
ctx = SparkContext(conf=conf)
dr = PartHoleDrudge(ctx)
p = dr.names
c_ = dr.op[AN]
c_dag = dr.op[CR]
a, b = p.V_dumms[:2]
i, j = p.O_dumms[:2]
t1 = IndexedBase('t1')
t2 = IndexedBase('t2')
dr.set_dbbar_base(t2, 2)
singles = dr.sum(
(a, p.V), (i, p.O), t1[a, i] * c_dag[a] * c_[i]
)
doubles = dr.sum(
(a, p.V), (b, p.V), (i, p.O), (j, p.O),
Rational(1, 4) * t2[a, b, i, j] * c_dag[a] * c_dag[b] * c_[j] * c_[i]
)
clusters = singles + doubles
curr = dr.ham
h_bar = dr.ham
for order in range(0, 4):
curr = (curr | clusters).simplify() * Rational(1, order + 1)
h_bar += curr
en_eqn = dr.eval_fermi_vev(h_bar).simplify()
proj = c_dag[i] * c_[a]
t1_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
proj = c_dag[i] * c_dag[j] * c_[b] * c_[a]
t2_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
# Check with the result from TCE.
TCE_BASE_URL = 'http://www.scs.illinois.edu/~sohirata/'
tce_res = [
dr.parse_tce(
urllib.request.urlopen(TCE_BASE_URL + i).read().decode(),
{1: t1, 2: t2}
).simplify()
for i in ['ccsd_e.out', 'ccsd_t1.out', 'ccsd_t2.out']
]
print('Checking with TCE result: ')
print('Energy: ', en_eqn == tce_res[0])
print('T1 amplitude: ', t1_eqn == tce_res[1])
print('T2 amplitude: ', t2_eqn == tce_res[2])
|
Add example for CCSD theory
When run serially, it takes about five minutes on Macbook Pro 2015.
This is a big success."""Automatic derivation of CCSD equations.
"""
import urllib.request
from pyspark import SparkConf, SparkContext
from sympy import IndexedBase, Rational
from drudge import PartHoleDrudge, CR, AN
conf = SparkConf().setAppName('CCSD-derivation')
ctx = SparkContext(conf=conf)
dr = PartHoleDrudge(ctx)
p = dr.names
c_ = dr.op[AN]
c_dag = dr.op[CR]
a, b = p.V_dumms[:2]
i, j = p.O_dumms[:2]
t1 = IndexedBase('t1')
t2 = IndexedBase('t2')
dr.set_dbbar_base(t2, 2)
singles = dr.sum(
(a, p.V), (i, p.O), t1[a, i] * c_dag[a] * c_[i]
)
doubles = dr.sum(
(a, p.V), (b, p.V), (i, p.O), (j, p.O),
Rational(1, 4) * t2[a, b, i, j] * c_dag[a] * c_dag[b] * c_[j] * c_[i]
)
clusters = singles + doubles
curr = dr.ham
h_bar = dr.ham
for order in range(0, 4):
curr = (curr | clusters).simplify() * Rational(1, order + 1)
h_bar += curr
en_eqn = dr.eval_fermi_vev(h_bar).simplify()
proj = c_dag[i] * c_[a]
t1_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
proj = c_dag[i] * c_dag[j] * c_[b] * c_[a]
t2_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
# Check with the result from TCE.
TCE_BASE_URL = 'http://www.scs.illinois.edu/~sohirata/'
tce_res = [
dr.parse_tce(
urllib.request.urlopen(TCE_BASE_URL + i).read().decode(),
{1: t1, 2: t2}
).simplify()
for i in ['ccsd_e.out', 'ccsd_t1.out', 'ccsd_t2.out']
]
print('Checking with TCE result: ')
print('Energy: ', en_eqn == tce_res[0])
print('T1 amplitude: ', t1_eqn == tce_res[1])
print('T2 amplitude: ', t2_eqn == tce_res[2])
|
<commit_before><commit_msg>Add example for CCSD theory
When run serially, it takes about five minutes on Macbook Pro 2015.
This is a big success.<commit_after>"""Automatic derivation of CCSD equations.
"""
import urllib.request
from pyspark import SparkConf, SparkContext
from sympy import IndexedBase, Rational
from drudge import PartHoleDrudge, CR, AN
conf = SparkConf().setAppName('CCSD-derivation')
ctx = SparkContext(conf=conf)
dr = PartHoleDrudge(ctx)
p = dr.names
c_ = dr.op[AN]
c_dag = dr.op[CR]
a, b = p.V_dumms[:2]
i, j = p.O_dumms[:2]
t1 = IndexedBase('t1')
t2 = IndexedBase('t2')
dr.set_dbbar_base(t2, 2)
singles = dr.sum(
(a, p.V), (i, p.O), t1[a, i] * c_dag[a] * c_[i]
)
doubles = dr.sum(
(a, p.V), (b, p.V), (i, p.O), (j, p.O),
Rational(1, 4) * t2[a, b, i, j] * c_dag[a] * c_dag[b] * c_[j] * c_[i]
)
clusters = singles + doubles
curr = dr.ham
h_bar = dr.ham
for order in range(0, 4):
curr = (curr | clusters).simplify() * Rational(1, order + 1)
h_bar += curr
en_eqn = dr.eval_fermi_vev(h_bar).simplify()
proj = c_dag[i] * c_[a]
t1_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
proj = c_dag[i] * c_dag[j] * c_[b] * c_[a]
t2_eqn = dr.eval_fermi_vev(proj * h_bar).simplify()
# Check with the result from TCE.
TCE_BASE_URL = 'http://www.scs.illinois.edu/~sohirata/'
tce_res = [
dr.parse_tce(
urllib.request.urlopen(TCE_BASE_URL + i).read().decode(),
{1: t1, 2: t2}
).simplify()
for i in ['ccsd_e.out', 'ccsd_t1.out', 'ccsd_t2.out']
]
print('Checking with TCE result: ')
print('Energy: ', en_eqn == tce_res[0])
print('T1 amplitude: ', t1_eqn == tce_res[1])
print('T2 amplitude: ', t2_eqn == tce_res[2])
|
|
80f0ba529cdd2ffd26bc74f5c2a22cfb315ce93a
|
bamova/bamov2npy.py
|
bamova/bamov2npy.py
|
import sys
import numpy as np
def read_phi(flname, n_steps, n_loci, stride):
	sampled_phis = np.zeros((n_steps // stride, n_loci))
fl = open(flname)
current_iter_idx = 0 # index used for storage
last_iter_idx = 0 # index used to identify when we finish a step
for ln in fl:
cols = ln.strip().split(",")
iter_idx = int(cols[0])
locus_idx = int(cols[1])
phi = float(cols[2])
		if iter_idx % stride == 0:
if last_iter_idx != iter_idx:
last_iter_idx = iter_idx
current_iter_idx += 1
sampled_phis[current_iter_idx, locus_idx] = phi
fl.close()
return sampled_phis
if __name__ == "__main__":
bamova_phi_output_flname = sys.argv[1]
n_steps = int(sys.argv[2])
n_loci = int(sys.argv[3])
stride = int(sys.argv[4])
npy_flname = sys.argv[5]
matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci, stride)
	np.save(npy_flname, matrix)
|
Add script for converting bamova phi text output to binary npy file
|
Add script for converting bamova phi text output to binary npy file
|
Python
|
apache-2.0
|
rnowling/pop-gen-models
|
Add script for converting bamova phi text output to binary npy file
|
import sys
import numpy as np
def read_phi(flname, n_steps, n_loci, stride):
	sampled_phis = np.zeros((n_steps // stride, n_loci))
fl = open(flname)
current_iter_idx = 0 # index used for storage
last_iter_idx = 0 # index used to identify when we finish a step
for ln in fl:
cols = ln.strip().split(",")
iter_idx = int(cols[0])
locus_idx = int(cols[1])
phi = float(cols[2])
		if iter_idx % stride == 0:
if last_iter_idx != iter_idx:
last_iter_idx = iter_idx
current_iter_idx += 1
sampled_phis[current_iter_idx, locus_idx] = phi
fl.close()
return sampled_phis
if __name__ == "__main__":
bamova_phi_output_flname = sys.argv[1]
n_steps = int(sys.argv[2])
n_loci = int(sys.argv[3])
stride = int(sys.argv[4])
npy_flname = sys.argv[5]
matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci, stride)
	np.save(npy_flname, matrix)
|
<commit_before><commit_msg>Add script for converting bamova phi text output to binary npy file<commit_after>
|
import sys
import numpy as np
def read_phi(flname, n_steps, n_loci, stride):
	sampled_phis = np.zeros((n_steps // stride, n_loci))
fl = open(flname)
current_iter_idx = 0 # index used for storage
last_iter_idx = 0 # index used to identify when we finish a step
for ln in fl:
cols = ln.strip().split(",")
iter_idx = int(cols[0])
locus_idx = int(cols[1])
phi = float(cols[2])
		if iter_idx % stride == 0:
if last_iter_idx != iter_idx:
last_iter_idx = iter_idx
current_iter_idx += 1
sampled_phis[current_iter_idx, locus_idx] = phi
fl.close()
return sampled_phis
if __name__ == "__main__":
bamova_phi_output_flname = sys.argv[1]
n_steps = int(sys.argv[2])
n_loci = int(sys.argv[3])
stride = int(sys.argv[4])
npy_flname = sys.argv[5]
matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci, stride)
	np.save(npy_flname, matrix)
|
Add script for converting bamova phi text output to binary npy fileimport sys
import numpy as np
def read_phi(flname, n_steps, n_loci, stride):
	sampled_phis = np.zeros((n_steps // stride, n_loci))
fl = open(flname)
current_iter_idx = 0 # index used for storage
last_iter_idx = 0 # index used to identify when we finish a step
for ln in fl:
cols = ln.strip().split(",")
iter_idx = int(cols[0])
locus_idx = int(cols[1])
phi = float(cols[2])
		if iter_idx % stride == 0:
if last_iter_idx != iter_idx:
last_iter_idx = iter_idx
current_iter_idx += 1
sampled_phis[current_iter_idx, locus_idx] = phi
fl.close()
return sampled_phis
if __name__ == "__main__":
bamova_phi_output_flname = sys.argv[1]
n_steps = int(sys.argv[2])
n_loci = int(sys.argv[3])
stride = int(sys.argv[4])
npy_flname = sys.argv[5]
matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci, stride)
	np.save(npy_flname, matrix)
|
<commit_before><commit_msg>Add script for converting bamova phi text output to binary npy file<commit_after>import sys
import numpy as np
def read_phi(flname, n_steps, n_loci, stride):
	sampled_phis = np.zeros((n_steps // stride, n_loci))
fl = open(flname)
current_iter_idx = 0 # index used for storage
last_iter_idx = 0 # index used to identify when we finish a step
for ln in fl:
cols = ln.strip().split(",")
iter_idx = int(cols[0])
locus_idx = int(cols[1])
phi = float(cols[2])
		if iter_idx % stride == 0:
if last_iter_idx != iter_idx:
last_iter_idx = iter_idx
current_iter_idx += 1
sampled_phis[current_iter_idx, locus_idx] = phi
fl.close()
return sampled_phis
if __name__ == "__main__":
bamova_phi_output_flname = sys.argv[1]
n_steps = int(sys.argv[2])
n_loci = int(sys.argv[3])
stride = int(sys.argv[4])
npy_flname = sys.argv[5]
matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci, stride)
	np.save(npy_flname, matrix)
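
A sketch of the input the parser above expects, inferred from its comma split (one "iteration,locus,phi" triple per line); the sample values are invented, and read_phi is assumed to be importable from the script:

# Hypothetical bamova phi output matching the iteration,locus,phi format;
# the numbers below are illustrative only.
with open('phi_sample.txt', 'w') as fl:
    fl.write('0,0,0.12\n0,1,0.34\n1,0,0.56\n')

matrix = read_phi('phi_sample.txt', n_steps=2, n_loci=2, stride=1)
print(matrix.shape)  # (2, 2): two sampled steps, two loci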
|
|
b839fd3a98dd943a19f5f852da89eae43470b89f
|
robot/robot/src/autonomous/hot_shoot.py
|
robot/robot/src/autonomous/hot_shoot.py
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
class main(object):
'''autonomous program'''
DEFAULT = False
MODE_NAME = "Hot shoot"
def __init__ (self, components):
''' initialize'''
super().__init__()
self.drive = components['drive']
self.intake = components['intake']
self.catapult = components['catapult']
# number of seconds to drive forward, allow us to tune it via SmartDashboard
wpilib.SmartDashboard.PutNumber('AutoDriveTime', 1.4)
wpilib.SmartDashboard.PutNumber('AutoDriveSpeed', 0.5)
wpilib.SmartDashboard.PutBoolean("IsHot", False)
def on_enable(self):
'''these are called when autonomous starts'''
self.drive_time = wpilib.SmartDashboard.GetNumber('AutoDriveTime')
self.drive_speed = wpilib.SmartDashboard.GetNumber('AutoDriveSpeed')
self.decided = False
self.started = False
print("Team 1418 autonomous code for 2014")
print("-> Drive time:", self.drive_time, "seconds")
print("-> Drive speed:", self.drive_speed)
#print("-> Battery voltage: %.02fv" % wpilib.DriverStation.GetInstance().GetBatteryVoltage())
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
'''The actual autonomous program'''
# decide if it's hot or not
if not self.decided:
self.hot = wpilib.SmartDashboard.GetBoolean("IsHot")
if self.hot:
self.decided = True
self.start_time = time_elapsed
elif time_elapsed > 6:
self.decided = True
self.start_time = time_elapsed
# always pulldown
if time_elapsed > 0.3:
self.catapult.pulldown()
if time_elapsed < 0.3:
# Get the arm down so that we can winch
self.intake.armDown()
elif time_elapsed < 1.5:
# The arm is at least far enough down now that
# the winch won't hit it, start winching
self.intake.armDown()
else:
if not self.started and self.decided:
self.decided = True
self.started = True
self.start_time = time_elapsed
if self.decided:
time_elapsed = time_elapsed - self.start_time
if time_elapsed < self.drive_time:
# Drive slowly forward for N seconds
self.drive.move(0, self.drive_speed, 0)
elif time_elapsed < 2.0 + self.drive_time + 1.0:
                # Finally, fire and keep firing for 1 second
self.catapult.launchNoSensor()
|
Add an autonomous mode based on the hot goal
|
Add an autonomous mode based on the hot goal
|
Python
|
bsd-3-clause
|
frc1418/2014
|
Add an autonomous mode based on the hot goal
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
class main(object):
'''autonomous program'''
DEFAULT = False
MODE_NAME = "Hot shoot"
def __init__ (self, components):
''' initialize'''
super().__init__()
self.drive = components['drive']
self.intake = components['intake']
self.catapult = components['catapult']
# number of seconds to drive forward, allow us to tune it via SmartDashboard
wpilib.SmartDashboard.PutNumber('AutoDriveTime', 1.4)
wpilib.SmartDashboard.PutNumber('AutoDriveSpeed', 0.5)
wpilib.SmartDashboard.PutBoolean("IsHot", False)
def on_enable(self):
'''these are called when autonomous starts'''
self.drive_time = wpilib.SmartDashboard.GetNumber('AutoDriveTime')
self.drive_speed = wpilib.SmartDashboard.GetNumber('AutoDriveSpeed')
self.decided = False
self.started = False
print("Team 1418 autonomous code for 2014")
print("-> Drive time:", self.drive_time, "seconds")
print("-> Drive speed:", self.drive_speed)
#print("-> Battery voltage: %.02fv" % wpilib.DriverStation.GetInstance().GetBatteryVoltage())
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
'''The actual autonomous program'''
# decide if it's hot or not
if not self.decided:
self.hot = wpilib.SmartDashboard.GetBoolean("IsHot")
if self.hot:
self.decided = True
self.start_time = time_elapsed
elif time_elapsed > 6:
self.decided = True
self.start_time = time_elapsed
# always pulldown
if time_elapsed > 0.3:
self.catapult.pulldown()
if time_elapsed < 0.3:
# Get the arm down so that we can winch
self.intake.armDown()
elif time_elapsed < 1.5:
# The arm is at least far enough down now that
# the winch won't hit it, start winching
self.intake.armDown()
else:
if not self.started and self.decided:
self.decided = True
self.started = True
self.start_time = time_elapsed
if self.decided:
time_elapsed = time_elapsed - self.start_time
if time_elapsed < self.drive_time:
# Drive slowly forward for N seconds
self.drive.move(0, self.drive_speed, 0)
elif time_elapsed < 2.0 + self.drive_time + 1.0:
                # Finally, fire and keep firing for 1 second
self.catapult.launchNoSensor()
|
<commit_before><commit_msg>Add an autonomous mode based on the hot goal<commit_after>
|
try:
import wpilib
except ImportError:
from pyfrc import wpilib
class main(object):
'''autonomous program'''
DEFAULT = False
MODE_NAME = "Hot shoot"
def __init__ (self, components):
''' initialize'''
super().__init__()
self.drive = components['drive']
self.intake = components['intake']
self.catapult = components['catapult']
# number of seconds to drive forward, allow us to tune it via SmartDashboard
wpilib.SmartDashboard.PutNumber('AutoDriveTime', 1.4)
wpilib.SmartDashboard.PutNumber('AutoDriveSpeed', 0.5)
wpilib.SmartDashboard.PutBoolean("IsHot", False)
def on_enable(self):
'''these are called when autonomous starts'''
self.drive_time = wpilib.SmartDashboard.GetNumber('AutoDriveTime')
self.drive_speed = wpilib.SmartDashboard.GetNumber('AutoDriveSpeed')
self.decided = False
self.started = False
print("Team 1418 autonomous code for 2014")
print("-> Drive time:", self.drive_time, "seconds")
print("-> Drive speed:", self.drive_speed)
#print("-> Battery voltage: %.02fv" % wpilib.DriverStation.GetInstance().GetBatteryVoltage())
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
'''The actual autonomous program'''
# decide if it's hot or not
if not self.decided:
self.hot = wpilib.SmartDashboard.GetBoolean("IsHot")
if self.hot:
self.decided = True
self.start_time = time_elapsed
elif time_elapsed > 6:
self.decided = True
self.start_time = time_elapsed
# always pulldown
if time_elapsed > 0.3:
self.catapult.pulldown()
if time_elapsed < 0.3:
# Get the arm down so that we can winch
self.intake.armDown()
elif time_elapsed < 1.5:
# The arm is at least far enough down now that
# the winch won't hit it, start winching
self.intake.armDown()
else:
if not self.started and self.decided:
self.decided = True
self.started = True
self.start_time = time_elapsed
if self.decided:
time_elapsed = time_elapsed - self.start_time
if time_elapsed < self.drive_time:
# Drive slowly forward for N seconds
self.drive.move(0, self.drive_speed, 0)
elif time_elapsed < 2.0 + self.drive_time + 1.0:
                # Finally, fire and keep firing for 1 second
self.catapult.launchNoSensor()
|
Add an autonomous mode based on the hot goal
try:
import wpilib
except ImportError:
from pyfrc import wpilib
class main(object):
'''autonomous program'''
DEFAULT = False
MODE_NAME = "Hot shoot"
def __init__ (self, components):
''' initialize'''
super().__init__()
self.drive = components['drive']
self.intake = components['intake']
self.catapult = components['catapult']
# number of seconds to drive forward, allow us to tune it via SmartDashboard
wpilib.SmartDashboard.PutNumber('AutoDriveTime', 1.4)
wpilib.SmartDashboard.PutNumber('AutoDriveSpeed', 0.5)
wpilib.SmartDashboard.PutBoolean("IsHot", False)
def on_enable(self):
'''these are called when autonomous starts'''
self.drive_time = wpilib.SmartDashboard.GetNumber('AutoDriveTime')
self.drive_speed = wpilib.SmartDashboard.GetNumber('AutoDriveSpeed')
self.decided = False
self.started = False
print("Team 1418 autonomous code for 2014")
print("-> Drive time:", self.drive_time, "seconds")
print("-> Drive speed:", self.drive_speed)
#print("-> Battery voltage: %.02fv" % wpilib.DriverStation.GetInstance().GetBatteryVoltage())
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
'''The actual autonomous program'''
# decide if it's hot or not
if not self.decided:
self.hot = wpilib.SmartDashboard.GetBoolean("IsHot")
if self.hot:
self.decided = True
self.start_time = time_elapsed
elif time_elapsed > 6:
self.decided = True
self.start_time = time_elapsed
# always pulldown
if time_elapsed > 0.3:
self.catapult.pulldown()
if time_elapsed < 0.3:
# Get the arm down so that we can winch
self.intake.armDown()
elif time_elapsed < 1.5:
# The arm is at least far enough down now that
# the winch won't hit it, start winching
self.intake.armDown()
else:
if not self.started and self.decided:
self.decided = True
self.started = True
self.start_time = time_elapsed
if self.decided:
time_elapsed = time_elapsed - self.start_time
if time_elapsed < self.drive_time:
# Drive slowly forward for N seconds
self.drive.move(0, self.drive_speed, 0)
elif time_elapsed < 2.0 + self.drive_time + 1.0:
                # Finally, fire and keep firing for 1 second
self.catapult.launchNoSensor()
|
<commit_before><commit_msg>Add an autonomous mode based on the hot goal<commit_after>
try:
import wpilib
except ImportError:
from pyfrc import wpilib
class main(object):
'''autonomous program'''
DEFAULT = False
MODE_NAME = "Hot shoot"
def __init__ (self, components):
''' initialize'''
super().__init__()
self.drive = components['drive']
self.intake = components['intake']
self.catapult = components['catapult']
# number of seconds to drive forward, allow us to tune it via SmartDashboard
wpilib.SmartDashboard.PutNumber('AutoDriveTime', 1.4)
wpilib.SmartDashboard.PutNumber('AutoDriveSpeed', 0.5)
wpilib.SmartDashboard.PutBoolean("IsHot", False)
def on_enable(self):
'''these are called when autonomous starts'''
self.drive_time = wpilib.SmartDashboard.GetNumber('AutoDriveTime')
self.drive_speed = wpilib.SmartDashboard.GetNumber('AutoDriveSpeed')
self.decided = False
self.started = False
print("Team 1418 autonomous code for 2014")
print("-> Drive time:", self.drive_time, "seconds")
print("-> Drive speed:", self.drive_speed)
#print("-> Battery voltage: %.02fv" % wpilib.DriverStation.GetInstance().GetBatteryVoltage())
def on_disable(self):
'''This function is called when autonomous mode is disabled'''
pass
def update(self, time_elapsed):
'''The actual autonomous program'''
# decide if it's hot or not
if not self.decided:
self.hot = wpilib.SmartDashboard.GetBoolean("IsHot")
if self.hot:
self.decided = True
self.start_time = time_elapsed
elif time_elapsed > 6:
self.decided = True
self.start_time = time_elapsed
# always pulldown
if time_elapsed > 0.3:
self.catapult.pulldown()
if time_elapsed < 0.3:
# Get the arm down so that we can winch
self.intake.armDown()
elif time_elapsed < 1.5:
# The arm is at least far enough down now that
# the winch won't hit it, start winching
self.intake.armDown()
else:
if not self.started and self.decided:
self.decided = True
self.started = True
self.start_time = time_elapsed
if self.decided:
time_elapsed = time_elapsed - self.start_time
if time_elapsed < self.drive_time:
# Drive slowly forward for N seconds
self.drive.move(0, self.drive_speed, 0)
elif time_elapsed < 2.0 + self.drive_time + 1.0:
                # Finally, fire and keep firing for 1 second
self.catapult.launchNoSensor()
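
A rough timeline of the phases implied by update(), assuming the default 1.4-second AutoDriveTime and measuring from the hot-goal decision (which itself can wait up to 6 seconds); this is a reading aid for the thresholds above, not robot code:

# Phase boundaries implied by update(), relative to the decision time.
DRIVE_TIME = 1.4  # default AutoDriveTime

def phase(t):
    if t < DRIVE_TIME:
        return 'drive forward'
    elif t < 2.0 + DRIVE_TIME + 1.0:  # same expression as in update()
        return 'fire catapult'
    return 'idle'

assert phase(0.5) == 'drive forward'
assert phase(2.0) == 'fire catapult'
assert phase(5.0) == 'idle'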
|
|
7a37c8a55ce74766b16d35c61a4f0197cdcc6663
|
cyber/python/cyber_py/cyber_py3.py
|
cyber/python/cyber_py/cyber_py3.py
|
#!/usr/bin/env python3
# ****************************************************************************
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
# -*- coding: utf-8 -*-
"""Module for init environment."""
import sys
sys.path = [path for path in sys.path if "python2.7" not in path]
import os
import importlib
import time
import threading
import ctypes
from google.protobuf.descriptor_pb2 import FileDescriptorProto
PY_CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
PY_CALLBACK_TYPE_T = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
# init vars
CYBER_PATH = os.environ['CYBER_PATH']
CYBER_DIR = os.path.split(CYBER_PATH)[0]
sys.path.append(CYBER_PATH + "/third_party/")
sys.path.append(CYBER_PATH + "/lib/")
sys.path.append(CYBER_PATH + "/python/cyber")
sys.path.append(CYBER_PATH + "/python/cyber_py")
sys.path.append(CYBER_PATH + "/lib/python/")
sys.path.append(CYBER_DIR + "/python/")
sys.path.append(CYBER_DIR + "/cyber/")
_CYBER_INIT = importlib.import_module('_cyber_init_py3')
##
# @brief init cyber environment.
# @param module_name Used as the log file name.
#
# @return Success is True, otherwise False.
def init(module_name="cyber_py"):
"""
init cyber environment.
"""
return _CYBER_INIT.py_init(module_name)
def ok():
"""
    is the cyber environment ok.
"""
return _CYBER_INIT.py_ok()
def shutdown():
"""
    shutdown the cyber environment.
"""
return _CYBER_INIT.py_shutdown()
def is_shutdown():
"""
is cyber shutdown.
"""
return _CYBER_INIT.py_is_shutdown()
def waitforshutdown():
"""
wait until the cyber is shutdown.
"""
return _CYBER_INIT.py_waitforshutdown()
|
Implement python wrapper for py_init.
|
Cyber: Implement python wrapper for py_init.
|
Python
|
apache-2.0
|
xiaoxq/apollo,jinghaomiao/apollo,jinghaomiao/apollo,ApolloAuto/apollo,ycool/apollo,xiaoxq/apollo,ycool/apollo,xiaoxq/apollo,ApolloAuto/apollo,ycool/apollo,jinghaomiao/apollo,xiaoxq/apollo,ApolloAuto/apollo,ApolloAuto/apollo,ycool/apollo,xiaoxq/apollo,xiaoxq/apollo,ycool/apollo,ApolloAuto/apollo,ApolloAuto/apollo,jinghaomiao/apollo,jinghaomiao/apollo,jinghaomiao/apollo,ycool/apollo
|
Cyber: Implement python wrapper for py_init.
|
#!/usr/bin/env python3
# ****************************************************************************
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
# -*- coding: utf-8 -*-
"""Module for init environment."""
import sys
sys.path = [path for path in sys.path if "python2.7" not in path]
import os
import importlib
import time
import threading
import ctypes
from google.protobuf.descriptor_pb2 import FileDescriptorProto
PY_CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
PY_CALLBACK_TYPE_T = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
# init vars
CYBER_PATH = os.environ['CYBER_PATH']
CYBER_DIR = os.path.split(CYBER_PATH)[0]
sys.path.append(CYBER_PATH + "/third_party/")
sys.path.append(CYBER_PATH + "/lib/")
sys.path.append(CYBER_PATH + "/python/cyber")
sys.path.append(CYBER_PATH + "/python/cyber_py")
sys.path.append(CYBER_PATH + "/lib/python/")
sys.path.append(CYBER_DIR + "/python/")
sys.path.append(CYBER_DIR + "/cyber/")
_CYBER_INIT = importlib.import_module('_cyber_init_py3')
##
# @brief init cyber environment.
# @param module_name Used as the log file name.
#
# @return Success is True, otherwise False.
def init(module_name="cyber_py"):
"""
init cyber environment.
"""
return _CYBER_INIT.py_init(module_name)
def ok():
"""
    is the cyber environment ok.
"""
return _CYBER_INIT.py_ok()
def shutdown():
"""
    shutdown the cyber environment.
"""
return _CYBER_INIT.py_shutdown()
def is_shutdown():
"""
is cyber shutdown.
"""
return _CYBER_INIT.py_is_shutdown()
def waitforshutdown():
"""
wait until the cyber is shutdown.
"""
return _CYBER_INIT.py_waitforshutdown()
|
<commit_before><commit_msg>Cyber: Implement python wrapper for py_init.<commit_after>
|
#!/usr/bin/env python3
# ****************************************************************************
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
# -*- coding: utf-8 -*-
"""Module for init environment."""
import sys
sys.path = [path for path in sys.path if "python2.7" not in path]
import os
import importlib
import time
import threading
import ctypes
from google.protobuf.descriptor_pb2 import FileDescriptorProto
PY_CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
PY_CALLBACK_TYPE_T = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
# init vars
CYBER_PATH = os.environ['CYBER_PATH']
CYBER_DIR = os.path.split(CYBER_PATH)[0]
sys.path.append(CYBER_PATH + "/third_party/")
sys.path.append(CYBER_PATH + "/lib/")
sys.path.append(CYBER_PATH + "/python/cyber")
sys.path.append(CYBER_PATH + "/python/cyber_py")
sys.path.append(CYBER_PATH + "/lib/python/")
sys.path.append(CYBER_DIR + "/python/")
sys.path.append(CYBER_DIR + "/cyber/")
_CYBER_INIT = importlib.import_module('_cyber_init_py3')
##
# @brief init cyber environment.
# @param module_name Used as the log file name.
#
# @return Success is True, otherwise False.
def init(module_name="cyber_py"):
"""
init cyber environment.
"""
return _CYBER_INIT.py_init(module_name)
def ok():
"""
    is the cyber environment ok.
"""
return _CYBER_INIT.py_ok()
def shutdown():
"""
    shutdown the cyber environment.
"""
return _CYBER_INIT.py_shutdown()
def is_shutdown():
"""
is cyber shutdown.
"""
return _CYBER_INIT.py_is_shutdown()
def waitforshutdown():
"""
wait until the cyber is shutdown.
"""
return _CYBER_INIT.py_waitforshutdown()
|
Cyber: Implement python wrapper for py_init.#!/usr/bin/env python3
# ****************************************************************************
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
# -*- coding: utf-8 -*-
"""Module for init environment."""
import sys
sys.path = [path for path in sys.path if "python2.7" not in path]
import os
import importlib
import time
import threading
import ctypes
from google.protobuf.descriptor_pb2 import FileDescriptorProto
PY_CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
PY_CALLBACK_TYPE_T = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
# init vars
CYBER_PATH = os.environ['CYBER_PATH']
CYBER_DIR = os.path.split(CYBER_PATH)[0]
sys.path.append(CYBER_PATH + "/third_party/")
sys.path.append(CYBER_PATH + "/lib/")
sys.path.append(CYBER_PATH + "/python/cyber")
sys.path.append(CYBER_PATH + "/python/cyber_py")
sys.path.append(CYBER_PATH + "/lib/python/")
sys.path.append(CYBER_DIR + "/python/")
sys.path.append(CYBER_DIR + "/cyber/")
_CYBER_INIT = importlib.import_module('_cyber_init_py3')
##
# @brief init cyber environment.
# @param module_name Used as the log file name.
#
# @return Success is True, otherwise False.
def init(module_name="cyber_py"):
"""
init cyber environment.
"""
return _CYBER_INIT.py_init(module_name)
def ok():
"""
    is the cyber environment ok.
"""
return _CYBER_INIT.py_ok()
def shutdown():
"""
    shutdown the cyber environment.
"""
return _CYBER_INIT.py_shutdown()
def is_shutdown():
"""
is cyber shutdown.
"""
return _CYBER_INIT.py_is_shutdown()
def waitforshutdown():
"""
wait until the cyber is shutdown.
"""
return _CYBER_INIT.py_waitforshutdown()
|
<commit_before><commit_msg>Cyber: Implement python wrapper for py_init.<commit_after>#!/usr/bin/env python3
# ****************************************************************************
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
# -*- coding: utf-8 -*-
"""Module for init environment."""
import sys
sys.path = [path for path in sys.path if "python2.7" not in path]
import os
import importlib
import time
import threading
import ctypes
from google.protobuf.descriptor_pb2 import FileDescriptorProto
PY_CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
PY_CALLBACK_TYPE_T = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
# init vars
CYBER_PATH = os.environ['CYBER_PATH']
CYBER_DIR = os.path.split(CYBER_PATH)[0]
sys.path.append(CYBER_PATH + "/third_party/")
sys.path.append(CYBER_PATH + "/lib/")
sys.path.append(CYBER_PATH + "/python/cyber")
sys.path.append(CYBER_PATH + "/python/cyber_py")
sys.path.append(CYBER_PATH + "/lib/python/")
sys.path.append(CYBER_DIR + "/python/")
sys.path.append(CYBER_DIR + "/cyber/")
_CYBER_INIT = importlib.import_module('_cyber_init_py3')
##
# @brief init cyber environment.
# @param module_name Used as the log file name.
#
# @return Success is True, otherwise False.
def init(module_name="cyber_py"):
"""
init cyber environment.
"""
return _CYBER_INIT.py_init(module_name)
def ok():
"""
    is the cyber environment ok.
"""
return _CYBER_INIT.py_ok()
def shutdown():
"""
    shutdown the cyber environment.
"""
return _CYBER_INIT.py_shutdown()
def is_shutdown():
"""
is cyber shutdown.
"""
return _CYBER_INIT.py_is_shutdown()
def waitforshutdown():
"""
wait until the cyber is shutdown.
"""
return _CYBER_INIT.py_waitforshutdown()
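
A minimal driver sketch for the wrapper above, assuming a working Apollo Cyber RT install (CYBER_PATH set and _cyber_init_py3 built) and that the module is importable as cyber_py3:

# Hypothetical usage loop; requires a real Cyber RT environment.
import time
import cyber_py3 as cyber

cyber.init('demo_node')
while cyber.ok() and not cyber.is_shutdown():
    time.sleep(0.5)  # a real node would do its work here
cyber.shutdown()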
|
|
ef64604235fc3d113288298a1d48e02290400360
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.13',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.14',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Update the PyPI version to 0.2.14
|
Update the PyPI version to 0.2.14
|
Python
|
mit
|
electronick1/todoist-python,Doist/todoist-python
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.13',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 0.2.14
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.14',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.13',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 0.2.14<commit_after>
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.14',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.13',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 0.2.14# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.14',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.13',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 0.2.14<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.14',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
b9f161820ae881227702167b2ba33d478936df8d
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
install_requires = (
)
description = "An archive population utility."
setup(
name='cnx-archive-population',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-archive-population",
    license='LGPL, See also LICENSE.txt',
description=description,
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points="""\
[console_scripts]
cnx-archive-populate = populate:main
""",
# test_suite=''
)
|
Make it a package for better dependency resolution.
|
Make it a package for better dependency resolution.
|
Python
|
agpl-3.0
|
pumazi/cnx-population,pumazi/cnx-populate
|
Make it a package for better dependency resolution.
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
install_requires = (
)
description = "An archive population utility."
setup(
name='cnx-archive-population',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-archive-population",
    license='LGPL, See also LICENSE.txt',
description=description,
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points="""\
[console_scripts]
cnx-archive-populate = populate:main
""",
# test_suite=''
)
|
<commit_before><commit_msg>Make it a package for better dependency resolution.<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
install_requires = (
)
description = "An archive population utility."
setup(
name='cnx-archive-population',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-archive-population",
    license='LGPL, See also LICENSE.txt',
description=description,
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points="""\
[console_scripts]
cnx-archive-populate = populate:main
""",
# test_suite=''
)
|
Make it a package for better dependency resolution.# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
install_requires = (
)
description = "An archive population utility."
setup(
name='cnx-archive-population',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-archive-population",
    license='LGPL, See also LICENSE.txt',
description=description,
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points="""\
[console_scripts]
cnx-archive-populate = populate:main
""",
# test_suite=''
)
|
<commit_before><commit_msg>Make it a package for better dependency resolution.<commit_after># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
install_requires = (
)
description = "An archive population utility."
setup(
name='cnx-archive-population',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-archive-population",
    license='LGPL, See also LICENSE.txt',
description=description,
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points="""\
[console_scripts]
cnx-archive-populate = populate:main
""",
# test_suite=''
)
|
|
bdafa10312d050b78f23af439262d4c99d07b191
|
jobs/migrations/0009_auto_20150317_1815.py
|
jobs/migrations/0009_auto_20150317_1815.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
class Migration(migrations.Migration):
dependencies = [
('jobs', '0008_auto_20150316_1205'),
]
operations = [
migrations.AlterField(
model_name='job',
name='agencies',
field=models.BooleanField(verbose_name='Agencies are OK to contact?', default=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='contact',
field=models.CharField(verbose_name='Contact name', blank=True, null=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='description',
field=markupfield.fields.MarkupField(verbose_name='Job description', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='email',
field=models.EmailField(verbose_name='Contact email', max_length=75),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='other_job_type',
field=models.CharField(verbose_name='Other job technologies', blank=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='requirements',
field=markupfield.fields.MarkupField(verbose_name='Job requirements', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='telecommuting',
field=models.BooleanField(verbose_name='Telecommuting allowed?', default=False),
preserve_default=True,
),
]
|
Add missing migration for verbose_name changes in the jobs app.
|
Add missing migration for verbose_name changes in the jobs app.
Refs #641.
|
Python
|
apache-2.0
|
malemburg/pythondotorg,SujaySKumar/pythondotorg,fe11x/pythondotorg,fe11x/pythondotorg,lebronhkh/pythondotorg,manhhomienbienthuy/pythondotorg,Mariatta/pythondotorg,lebronhkh/pythondotorg,manhhomienbienthuy/pythondotorg,Mariatta/pythondotorg,lepture/pythondotorg,berkerpeksag/pythondotorg,willingc/pythondotorg,ahua/pythondotorg,manhhomienbienthuy/pythondotorg,willingc/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,berkerpeksag/pythondotorg,demvher/pythondotorg,malemburg/pythondotorg,ahua/pythondotorg,SujaySKumar/pythondotorg,malemburg/pythondotorg,python/pythondotorg,lebronhkh/pythondotorg,python/pythondotorg,demvher/pythondotorg,python/pythondotorg,lepture/pythondotorg,demvher/pythondotorg,lebronhkh/pythondotorg,lebronhkh/pythondotorg,lepture/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,malemburg/pythondotorg,proevo/pythondotorg,fe11x/pythondotorg,python/pythondotorg,berkerpeksag/pythondotorg,demvher/pythondotorg,berkerpeksag/pythondotorg,lepture/pythondotorg,ahua/pythondotorg,SujaySKumar/pythondotorg,proevo/pythondotorg,demvher/pythondotorg,ahua/pythondotorg,fe11x/pythondotorg,lepture/pythondotorg,fe11x/pythondotorg,manhhomienbienthuy/pythondotorg,berkerpeksag/pythondotorg,willingc/pythondotorg,SujaySKumar/pythondotorg,ahua/pythondotorg,willingc/pythondotorg,SujaySKumar/pythondotorg
|
Add missing migration for verbose_name changes in the jobs app.
Refs #641.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
class Migration(migrations.Migration):
dependencies = [
('jobs', '0008_auto_20150316_1205'),
]
operations = [
migrations.AlterField(
model_name='job',
name='agencies',
field=models.BooleanField(verbose_name='Agencies are OK to contact?', default=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='contact',
field=models.CharField(verbose_name='Contact name', blank=True, null=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='description',
field=markupfield.fields.MarkupField(verbose_name='Job description', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='email',
field=models.EmailField(verbose_name='Contact email', max_length=75),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='other_job_type',
field=models.CharField(verbose_name='Other job technologies', blank=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='requirements',
field=markupfield.fields.MarkupField(verbose_name='Job requirements', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='telecommuting',
field=models.BooleanField(verbose_name='Telecommuting allowed?', default=False),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration for verbose_name changes in the jobs app.
Refs #641.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
class Migration(migrations.Migration):
dependencies = [
('jobs', '0008_auto_20150316_1205'),
]
operations = [
migrations.AlterField(
model_name='job',
name='agencies',
field=models.BooleanField(verbose_name='Agencies are OK to contact?', default=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='contact',
field=models.CharField(verbose_name='Contact name', blank=True, null=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='description',
field=markupfield.fields.MarkupField(verbose_name='Job description', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='email',
field=models.EmailField(verbose_name='Contact email', max_length=75),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='other_job_type',
field=models.CharField(verbose_name='Other job technologies', blank=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='requirements',
field=markupfield.fields.MarkupField(verbose_name='Job requirements', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='telecommuting',
field=models.BooleanField(verbose_name='Telecommuting allowed?', default=False),
preserve_default=True,
),
]
|
Add missing migration for verbose_name changes in the jobs app.
Refs #641.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
class Migration(migrations.Migration):
dependencies = [
('jobs', '0008_auto_20150316_1205'),
]
operations = [
migrations.AlterField(
model_name='job',
name='agencies',
field=models.BooleanField(verbose_name='Agencies are OK to contact?', default=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='contact',
field=models.CharField(verbose_name='Contact name', blank=True, null=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='description',
field=markupfield.fields.MarkupField(verbose_name='Job description', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='email',
field=models.EmailField(verbose_name='Contact email', max_length=75),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='other_job_type',
field=models.CharField(verbose_name='Other job technologies', blank=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='requirements',
field=markupfield.fields.MarkupField(verbose_name='Job requirements', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='telecommuting',
field=models.BooleanField(verbose_name='Telecommuting allowed?', default=False),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration for verbose_name changes in the jobs app.
Refs #641.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
class Migration(migrations.Migration):
dependencies = [
('jobs', '0008_auto_20150316_1205'),
]
operations = [
migrations.AlterField(
model_name='job',
name='agencies',
field=models.BooleanField(verbose_name='Agencies are OK to contact?', default=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='contact',
field=models.CharField(verbose_name='Contact name', blank=True, null=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='description',
field=markupfield.fields.MarkupField(verbose_name='Job description', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='email',
field=models.EmailField(verbose_name='Contact email', max_length=75),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='other_job_type',
field=models.CharField(verbose_name='Other job technologies', blank=True, max_length=100),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='requirements',
field=markupfield.fields.MarkupField(verbose_name='Job requirements', rendered_field=True),
preserve_default=True,
),
migrations.AlterField(
model_name='job',
name='telecommuting',
field=models.BooleanField(verbose_name='Telecommuting allowed?', default=False),
preserve_default=True,
),
]
|
|
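A hedged sketch of the model state these AlterField operations encode; the field names and verbose_name strings are taken from the migration itself, while the model skeleton is an assumption:

from django.db import models

class Job(models.Model):
    # post-migration state of two of the altered fields
    agencies = models.BooleanField('Agencies are OK to contact?', default=True)
    telecommuting = models.BooleanField('Telecommuting allowed?', default=False)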
d3b34eeebfb22e8dc5f31b0ee849187ea8042f92
|
migrations/versions/1444ba317b65_.py
|
migrations/versions/1444ba317b65_.py
|
"""empty message
Revision ID: 1444ba317b65
Revises: None
Create Date: 2013-11-25 10:32:40.678287
"""
# revision identifiers, used by Alembic.
revision = '1444ba317b65'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('nodes',
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('name'),
sa.UniqueConstraint('name')
)
op.create_table('packages',
sa.Column('sha', sa.String(length=64), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('version', sa.String(length=64), nullable=True),
sa.Column('uri', sa.String(length=255), nullable=True),
sa.Column('summary', sa.String(length=255), nullable=True),
sa.Column('provider', sa.String(length=10), nullable=True),
sa.Column('architecture', sa.String(length=10), nullable=True),
sa.PrimaryKeyConstraint('sha'),
sa.UniqueConstraint('sha')
)
op.create_table('node_packages',
sa.Column('node_name', sa.String(), nullable=False),
sa.Column('package_sha', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['node_name'], ['nodes.name'], ),
sa.ForeignKeyConstraint(['package_sha'], ['packages.sha'], ),
sa.PrimaryKeyConstraint('node_name', 'package_sha')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('node_packages')
op.drop_table('packages')
op.drop_table('nodes')
### end Alembic commands ###
|
Update db models and migrations to comply with tests
|
Update db models and migrations to comply with tests
Update test cases to include more scenarios. This required changes in the
db models and collector code.
Delete old migrations and start with a single, new migration.
Signed-off-by: Christoph Kassen <21a19229d09eaaba535d395ba11b820b0886ba87@telekom.de>
|
Python
|
apache-2.0
|
TelekomCloud/pony-express
|
Update db models and migrations to comply with tests
Update test cases to include more scenarios. This required changes in the
db models and collector code.
Delete old migrations and start with a single, new migration.
Signed-off-by: Christoph Kassen <21a19229d09eaaba535d395ba11b820b0886ba87@telekom.de>
|
"""empty message
Revision ID: 1444ba317b65
Revises: None
Create Date: 2013-11-25 10:32:40.678287
"""
# revision identifiers, used by Alembic.
revision = '1444ba317b65'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('nodes',
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('name'),
sa.UniqueConstraint('name')
)
op.create_table('packages',
sa.Column('sha', sa.String(length=64), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('version', sa.String(length=64), nullable=True),
sa.Column('uri', sa.String(length=255), nullable=True),
sa.Column('summary', sa.String(length=255), nullable=True),
sa.Column('provider', sa.String(length=10), nullable=True),
sa.Column('architecture', sa.String(length=10), nullable=True),
sa.PrimaryKeyConstraint('sha'),
sa.UniqueConstraint('sha')
)
op.create_table('node_packages',
sa.Column('node_name', sa.String(), nullable=False),
sa.Column('package_sha', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['node_name'], ['nodes.name'], ),
sa.ForeignKeyConstraint(['package_sha'], ['packages.sha'], ),
sa.PrimaryKeyConstraint('node_name', 'package_sha')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('node_packages')
op.drop_table('packages')
op.drop_table('nodes')
### end Alembic commands ###
|
<commit_before><commit_msg>Update db models and migrations to comply with tests
Update test cases to include more scenarios. This required changes in the
db models and collector code.
Delete old migrations and start with a single, new migration.
Signed-off-by: Christoph Kassen <21a19229d09eaaba535d395ba11b820b0886ba87@telekom.de><commit_after>
|
"""empty message
Revision ID: 1444ba317b65
Revises: None
Create Date: 2013-11-25 10:32:40.678287
"""
# revision identifiers, used by Alembic.
revision = '1444ba317b65'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('nodes',
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('name'),
sa.UniqueConstraint('name')
)
op.create_table('packages',
sa.Column('sha', sa.String(length=64), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('version', sa.String(length=64), nullable=True),
sa.Column('uri', sa.String(length=255), nullable=True),
sa.Column('summary', sa.String(length=255), nullable=True),
sa.Column('provider', sa.String(length=10), nullable=True),
sa.Column('architecture', sa.String(length=10), nullable=True),
sa.PrimaryKeyConstraint('sha'),
sa.UniqueConstraint('sha')
)
op.create_table('node_packages',
sa.Column('node_name', sa.String(), nullable=False),
sa.Column('package_sha', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['node_name'], ['nodes.name'], ),
sa.ForeignKeyConstraint(['package_sha'], ['packages.sha'], ),
sa.PrimaryKeyConstraint('node_name', 'package_sha')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('node_packages')
op.drop_table('packages')
op.drop_table('nodes')
### end Alembic commands ###
|
Update db models and migrations to comply with tests
Update test cases to include more scenarios. This required changes in the
db models and collector code.
Delete old migrations and start with a single, new migration.
Signed-off-by: Christoph Kassen <21a19229d09eaaba535d395ba11b820b0886ba87@telekom.de>"""empty message
Revision ID: 1444ba317b65
Revises: None
Create Date: 2013-11-25 10:32:40.678287
"""
# revision identifiers, used by Alembic.
revision = '1444ba317b65'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('nodes',
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('name'),
sa.UniqueConstraint('name')
)
op.create_table('packages',
sa.Column('sha', sa.String(length=64), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('version', sa.String(length=64), nullable=True),
sa.Column('uri', sa.String(length=255), nullable=True),
sa.Column('summary', sa.String(length=255), nullable=True),
sa.Column('provider', sa.String(length=10), nullable=True),
sa.Column('architecture', sa.String(length=10), nullable=True),
sa.PrimaryKeyConstraint('sha'),
sa.UniqueConstraint('sha')
)
op.create_table('node_packages',
sa.Column('node_name', sa.String(), nullable=False),
sa.Column('package_sha', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['node_name'], ['nodes.name'], ),
sa.ForeignKeyConstraint(['package_sha'], ['packages.sha'], ),
sa.PrimaryKeyConstraint('node_name', 'package_sha')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('node_packages')
op.drop_table('packages')
op.drop_table('nodes')
### end Alembic commands ###
|
<commit_before><commit_msg>Update db models and migrations to comply with tests
Update test cases to include more scenarios. This required changes in the
db models and collector code.
Delete old migrations and start with a single, new migration.
Signed-off-by: Christoph Kassen <21a19229d09eaaba535d395ba11b820b0886ba87@telekom.de><commit_after>"""empty message
Revision ID: 1444ba317b65
Revises: None
Create Date: 2013-11-25 10:32:40.678287
"""
# revision identifiers, used by Alembic.
revision = '1444ba317b65'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('nodes',
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('name'),
sa.UniqueConstraint('name')
)
op.create_table('packages',
sa.Column('sha', sa.String(length=64), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('version', sa.String(length=64), nullable=True),
sa.Column('uri', sa.String(length=255), nullable=True),
sa.Column('summary', sa.String(length=255), nullable=True),
sa.Column('provider', sa.String(length=10), nullable=True),
sa.Column('architecture', sa.String(length=10), nullable=True),
sa.PrimaryKeyConstraint('sha'),
sa.UniqueConstraint('sha')
)
op.create_table('node_packages',
sa.Column('node_name', sa.String(), nullable=False),
sa.Column('package_sha', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['node_name'], ['nodes.name'], ),
sa.ForeignKeyConstraint(['package_sha'], ['packages.sha'], ),
sa.PrimaryKeyConstraint('node_name', 'package_sha')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('node_packages')
op.drop_table('packages')
op.drop_table('nodes')
### end Alembic commands ###
|
|
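A hedged sketch of applying this revision programmatically; Config and command are Alembic's documented API, while the alembic.ini path is an assumption:

from alembic.config import Config
from alembic import command

cfg = Config('alembic.ini')           # assumed location of the project config
command.upgrade(cfg, '1444ba317b65')  # create nodes/packages/node_packages
# command.downgrade(cfg, 'base')      # drop them again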
b51f399b4ac59011a37ade0cb488cb866cb11623
|
__main__.py
|
__main__.py
|
import facebook
from getpass import getpass
token = getpass('User Access Token: ')
graph = facebook.GraphAPI(token)
friends = graph.get_connections("me", "friends")
friend_list = [friend['name'] for friend in friends['data']]
print friend_list
|
Create simple script to get first page of friends list.
|
Create simple script to get first page of friends list.
|
Python
|
mit
|
brendancsmith/cohort-facebook,brendancsmith/cohort-facebook
|
Create simple script to get first page of friends list.
|
import facebook
from getpass import getpass
token = getpass('User Access Token: ')
graph = facebook.GraphAPI(token)
friends = graph.get_connections("me", "friends")
friend_list = [friend['name'] for friend in friends['data']]
print friend_list
|
<commit_before><commit_msg>Create simple script to get first page of friends list.<commit_after>
|
import facebook
from getpass import getpass
token = getpass('User Access Token: ')
graph = facebook.GraphAPI(token)
friends = graph.get_connections("me", "friends")
friend_list = [friend['name'] for friend in friends['data']]
print friend_list
|
Create simple script to get first page of friends list.import facebook
from getpass import getpass
token = getpass('User Access Token: ')
graph = facebook.GraphAPI(token)
friends = graph.get_connections("me", "friends")
friend_list = [friend['name'] for friend in friends['data']]
print friend_list
|
<commit_before><commit_msg>Create simple script to get first page of friends list.<commit_after>import facebook
from getpass import getpass
token = getpass('User Access Token: ')
graph = facebook.GraphAPI(token)
friends = graph.get_connections("me", "friends")
friend_list = [friend['name'] for friend in friends['data']]
print friend_list
|
|
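A hedged extension of the script above for readers who need more than the first page; Graph API list responses typically carry a 'paging' dict with a 'next' URL, and the requests dependency is an assumption:

import requests

next_url = friends.get('paging', {}).get('next')
while next_url:
    page = requests.get(next_url).json()
    friend_list.extend(f['name'] for f in page['data'])
    next_url = page.get('paging', {}).get('next')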
ff15b059b85ac992709e85c68e91e21ffe718078
|
util/make_new_dif.py
|
util/make_new_dif.py
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# make_new_dif.py is a script for quickly applying replacement operations to
# dif_template.h.tpl. See sw/device/lib/dif/dif_template.h.tpl for more
# information.
#
# The produced file may still require spell-checking and general cleaning-up.
import argparse
from pathlib import Path
from mako.template import Template
# This file is $REPO_TOP/util/make_new_dif.py, so it takes two parent()
# calls to get back to the top.
REPO_TOP = Path(__file__).resolve().parent.parent
def main():
dif_dir = REPO_TOP / 'sw/device/lib/dif'
template_file = dif_dir / 'dif_template.h.tpl'
parser = argparse.ArgumentParser()
parser.add_argument('--ip',
'-i',
required=True,
help='the short name of the IP, in snake_case')
parser.add_argument('--peripheral',
'-p',
required=True,
help='the documentation-friendly name of the IP')
parser.add_argument(
'--handle-param',
'-a',
default='handle',
help='an optional name to replace the `handle` parameter name')
parser.add_argument('--template',
'-t',
type=Path,
default=template_file,
help='where to find the header template')
parser.add_argument('--output',
'-o',
type=Path,
help='where to write the header; defaults to dif_ip.h')
args = parser.parse_args()
ip_snake = args.ip
ip_camel = ''.join([word.capitalize() for word in args.ip.split('_')])
ip_upper = ip_snake.upper()
periph_lower = args.peripheral
# We just want to set the first character to title case. In particular,
# .capitalize() does not do the right thing, since it would convert
# UART to Uart.
periph_upper = periph_lower[0].upper() + periph_lower[1:]
handle = args.handle_param
with args.template.open('r') as f:
template = Template(f.read())
header = template.render(
ip_snake=ip_snake,
ip_camel=ip_camel,
ip_upper=ip_upper,
periph_lower=periph_lower,
periph_upper=periph_upper,
handle=handle,
)
dif_file = args.output or dif_dir / 'dif_{}.h'.format(ip_snake)
with dif_file.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(str(dif_file)))
if __name__ == '__main__':
main()
|
Add a script for instantiating dif_template.tpl.h
|
[dif/util] Add a script for instantiating dif_template.tpl.h
Signed-off-by: Miguel Young de la Sota <71b8e7f4945fd97b98544cf897992af89646547a@google.com>
|
Python
|
apache-2.0
|
lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan
|
[dif/util] Add a script for instantiating dif_template.tpl.h
Signed-off-by: Miguel Young de la Sota <71b8e7f4945fd97b98544cf897992af89646547a@google.com>
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# make_new_dif.py is a script for quickly applying replacement operations to
# dif_template.h.tpl. See sw/device/lib/dif/dif_template.h.tpl for more
# information.
#
# The produced file may still require spell-checking and general cleaning-up.
import argparse
from pathlib import Path
from mako.template import Template
# This file is $REPO_TOP/util/make_new_dif.py, so it takes two parent()
# calls to get back to the top.
REPO_TOP = Path(__file__).resolve().parent.parent
def main():
dif_dir = REPO_TOP / 'sw/device/lib/dif'
template_file = dif_dir / 'dif_template.h.tpl'
parser = argparse.ArgumentParser()
parser.add_argument('--ip',
'-i',
required=True,
help='the short name of the IP, in snake_case')
parser.add_argument('--peripheral',
'-p',
required=True,
help='the documentation-friendly name of the IP')
parser.add_argument(
'--handle-param',
'-a',
default='handle',
help='an optional name to replace the `handle` parameter name')
parser.add_argument('--template',
'-t',
type=Path,
default=template_file,
help='where to find the header template')
parser.add_argument('--output',
'-o',
type=Path,
help='where to write the header; defaults to dif_ip.h')
args = parser.parse_args()
ip_snake = args.ip
ip_camel = ''.join([word.capitalize() for word in args.ip.split('_')])
ip_upper = ip_snake.upper()
periph_lower = args.peripheral
# We just want to set the first character to title case. In particular,
# .capitalize() does not do the right thing, since it would convert
# UART to Uart.
periph_upper = periph_lower[0].upper() + periph_lower[1:]
handle = args.handle_param
with args.template.open('r') as f:
template = Template(f.read())
header = template.render(
ip_snake=ip_snake,
ip_camel=ip_camel,
ip_upper=ip_upper,
periph_lower=periph_lower,
periph_upper=periph_upper,
handle=handle,
)
dif_file = args.output or dif_dir / 'dif_{}.h'.format(ip_snake)
with dif_file.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(str(dif_file)))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>[dif/util] Add a script for instantiating dif_template.tpl.h
Signed-off-by: Miguel Young de la Sota <71b8e7f4945fd97b98544cf897992af89646547a@google.com><commit_after>
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# make_new_dif.py is a script for quickly applying replacement operations to
# dif_template.h.tpl. See sw/device/lib/dif/dif_template.h.tpl for more
# information.
#
# The produced file may still require spell-checking and general cleaning-up.
import argparse
from pathlib import Path
from mako.template import Template
# This file is $REPO_TOP/util/make_new_dif.py, so it takes two parent()
# calls to get back to the top.
REPO_TOP = Path(__file__).resolve().parent.parent
def main():
dif_dir = REPO_TOP / 'sw/device/lib/dif'
template_file = dif_dir / 'dif_template.h.tpl'
parser = argparse.ArgumentParser()
parser.add_argument('--ip',
'-i',
required=True,
help='the short name of the IP, in snake_case')
parser.add_argument('--peripheral',
'-p',
required=True,
help='the documentation-friendly name of the IP')
parser.add_argument(
'--handle-param',
'-a',
default='handle',
help='an optional name to replace the `handle` parameter name')
parser.add_argument('--template',
'-t',
type=Path,
default=template_file,
help='where to find the header template')
parser.add_argument('--output',
'-o',
type=Path,
help='where to write the header; defaults to dif_ip.h')
args = parser.parse_args()
ip_snake = args.ip
ip_camel = ''.join([word.capitalize() for word in args.ip.split('_')])
ip_upper = ip_snake.upper()
periph_lower = args.peripheral
# We just want to set the first character to title case. In particular,
# .capitalize() does not do the right thing, since it would convert
# UART to Uart.
periph_upper = periph_lower[0].upper() + periph_lower[1:]
handle = args.handle_param
with args.template.open('r') as f:
template = Template(f.read())
header = template.render(
ip_snake=ip_snake,
ip_camel=ip_camel,
ip_upper=ip_upper,
periph_lower=periph_lower,
periph_upper=periph_upper,
handle=handle,
)
dif_file = args.output or dif_dir / 'dif_{}.h'.format(ip_snake)
with dif_file.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(str(dif_file)))
if __name__ == '__main__':
main()
|
[dif/util] Add a script for instantiating dif_template.tpl.h
Signed-off-by: Miguel Young de la Sota <71b8e7f4945fd97b98544cf897992af89646547a@google.com>#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# make_new_dif.py is a script for quickly applying replacement operations to
# dif_template.h.tpl. See sw/device/lib/dif/dif_template.h.tpl for more
# information.
#
# The produced file may still require spell-checking and general cleaning-up.
import argparse
from pathlib import Path
from mako.template import Template
# This file is $REPO_TOP/util/make_new_dif.py, so it takes two parent()
# calls to get back to the top.
REPO_TOP = Path(__file__).resolve().parent.parent
def main():
dif_dir = REPO_TOP / 'sw/device/lib/dif'
template_file = dif_dir / 'dif_template.h.tpl'
parser = argparse.ArgumentParser()
parser.add_argument('--ip',
'-i',
required=True,
help='the short name of the IP, in snake_case')
parser.add_argument('--peripheral',
'-p',
required=True,
help='the documentation-friendly name of the IP')
parser.add_argument(
'--handle-param',
'-a',
default='handle',
help='an optional name to replace the `handle` parameter name')
parser.add_argument('--template',
'-t',
type=Path,
default=template_file,
help='where to find the header template')
parser.add_argument('--output',
'-o',
type=Path,
help='where to write the header; defaults to dif_ip.h')
args = parser.parse_args()
ip_snake = args.ip
ip_camel = ''.join([word.capitalize() for word in args.ip.split('_')])
ip_upper = ip_snake.upper()
periph_lower = args.peripheral
# We just want to set the first character to title case. In particular,
# .capitalize() does not do the right thing, since it would convert
# UART to Uart.
periph_upper = periph_lower[0].upper() + periph_lower[1:]
handle = args.handle_param
with args.template.open('r') as f:
template = Template(f.read())
header = template.render(
ip_snake=ip_snake,
ip_camel=ip_camel,
ip_upper=ip_upper,
periph_lower=periph_lower,
periph_upper=periph_upper,
handle=handle,
)
dif_file = args.output or dif_dir / 'dif_{}.h'.format(ip_snake)
with dif_file.open('w') as f:
f.write(header)
print('Template sucessfuly written to {}.'.format(str(dif_file)))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>[dif/util] Add a script for instantiating dif_template.tpl.h
Signed-off-by: Miguel Young de la Sota <71b8e7f4945fd97b98544cf897992af89646547a@google.com><commit_after>#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# make_new_dif.py is a script for quickly applying replacement operations to
# dif_template.h.tpl. See sw/device/lib/dif/dif_template.h.tpl for more
# information.
#
# The produced file may still require spell-checking and general cleaning-up.
import argparse
from pathlib import Path
from mako.template import Template
# This file is $REPO_TOP/util/make_new_dif.py, so it takes two parent()
# calls to get back to the top.
REPO_TOP = Path(__file__).resolve().parent.parent
def main():
dif_dir = REPO_TOP / 'sw/device/lib/dif'
template_file = dif_dir / 'dif_template.h.tpl'
parser = argparse.ArgumentParser()
parser.add_argument('--ip',
'-i',
required=True,
help='the short name of the IP, in snake_case')
parser.add_argument('--peripheral',
'-p',
required=True,
help='the documentation-friendly name of the IP')
parser.add_argument(
'--handle-param',
'-a',
default='handle',
help='an optional name to replace the `handle` parameter name')
parser.add_argument('--template',
'-t',
type=Path,
default=template_file,
help='where to find the header template')
parser.add_argument('--output',
'-o',
type=Path,
help='where to write the header; defaults to dif_ip.h')
args = parser.parse_args()
ip_snake = args.ip
ip_camel = ''.join([word.capitalize() for word in args.ip.split('_')])
ip_upper = ip_snake.upper()
periph_lower = args.peripheral
# We just want to set the first character to title case. In particular,
# .capitalize() does not do the right thing, since it would convert
# UART to Uart.
periph_upper = periph_lower[0].upper() + periph_lower[1:]
handle = args.handle_param
with args.template.open('r') as f:
template = Template(f.read())
header = template.render(
ip_snake=ip_snake,
ip_camel=ip_camel,
ip_upper=ip_upper,
periph_lower=periph_lower,
periph_upper=periph_upper,
handle=handle,
)
dif_file = args.output or dif_dir / 'dif_{}.h'.format(ip_snake)
with dif_file.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(str(dif_file)))
if __name__ == '__main__':
main()
|
|
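A small sketch of the name derivation the script performs, useful for predicting the generated file name; the sample values are illustrative:

ip_snake = 'my_ip'                                               # --ip my_ip
ip_camel = ''.join(w.capitalize() for w in ip_snake.split('_'))  # 'MyIp'
ip_upper = ip_snake.upper()                                      # 'MY_IP'
periph = 'my peripheral'                                         # --peripheral
periph_upper = periph[0].upper() + periph[1:]                    # 'My peripheral'
print('dif_{}.h'.format(ip_snake))                               # default output name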
44a66ff4a31763c71c5fa5a31967fc019a1bad2a
|
sa-extract/sa2cdec.py
|
sa-extract/sa2cdec.py
|
#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format
|
Python
|
apache-2.0
|
carhaas/cdec-semparse,carhaas/cdec-semparse,veer66/cdec,pks/cdec-dtrain,carhaas/cdec-semparse,veer66/cdec,redpony/cdec,carhaas/cdec-semparse,pks/cdec-dtrain,m5w/atools,pks/cdec-dtrain,redpony/cdec,pks/cdec-dtrain,pks/cdec-dtrain,veer66/cdec,redpony/cdec,redpony/cdec,veer66/cdec,m5w/atools,veer66/cdec,veer66/cdec,redpony/cdec,m5w/atools,carhaas/cdec-semparse,carhaas/cdec-semparse,pks/cdec-dtrain,redpony/cdec
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format
|
#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
<commit_before><commit_msg>Add script to convert from sa-extract's unnamed format to cdec's more readable named format<commit_after>
|
#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
<commit_before><commit_msg>Add script to convert from sa-extract's unnamed format to cdec's more readable named format<commit_after>#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
|
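A worked example of the rewrite the script performs; the feature names are illustrative, only the ' ||| ' layout comes from the record. Given a names file containing EgivenF and CountEF, the grammar line

[X] ||| der mann ||| the man ||| 0.4 2.0 ||| 0-0 1-1

becomes

[X] ||| der mann ||| the man ||| EgivenF=0.4 CountEF=2.0 ||| 0-0 1-1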
4de8de5efc5d74bcb5647d2c13dc863c60a9fa58
|
ibei/devossolarcell.py
|
ibei/devossolarcell.py
|
# -*- coding: utf-8 -*-
import numpy as np
from astropy import constants, units
from sympy.mpmath import polylog
from physicalproperty import PhysicalProperty, find_PhysicalProperty
class DeVosSolarcell(SQSolarcell):
"""
DeVos single-junction solar cell
This class implements a solar cell as described by DeVos :cite:`9780198513926` Ch. 6.
A DeVosSolarcell is instantiated with a :class:`dict` having keys identical to the class's public data attributes. Each key's value must satisfy the constraints noted with the corresponding public data attribute. Dictionary values can be any numeric type or of type :class:`astropy.units.Quantity` so long as the units are compatible with what's listed.
"""
temp_planet = PhysicalProperty(unit = "K", lo_bnd = 0)
"""
Planet temperature > 0 [K]
"""
voltage = PhysicalProperty(unit = "V")
"""
Bias voltage [V]
"""
def calc_power_density(self):
"""
Solar cell power density
The output power density is calculated according to DeVos's :cite:`9780198513926` Eq. 6.4. Note that this expression assumes fully concentrated sunlight and is therefore not completely general.
This method returns values of type :class:`astropy.units.Quantity` with units of [W m^-2].
"""
electron_energy = constants.e.si * self.voltage
if self.bandgap == 0:
solar_flux = units.Quantity(0., "1/(m2*s)")
solar_cell_flux = units.Quantity(0., "1/(m2*s)")
else:
solar_flux = uibei(2, self.bandgap, self.temp_sun, 0)
solar_cell_flux = uibei(2, self.bandgap, self.temp_planet, electron_energy)
power_density = electron_energy * (solar_flux - solar_cell_flux)
return power_density.to("W/m^2")
|
Split DeVosSolarcell class into its own file
|
Split DeVosSolarcell class into its own file
|
Python
|
mit
|
jrsmith3/tec,jrsmith3/tec,jrsmith3/ibei
|
Split DeVosSolarcell class into its own file
|
# -*- coding: utf-8 -*-
import numpy as np
from astropy import constants, units
from sympy.mpmath import polylog
from physicalproperty import PhysicalProperty, find_PhysicalProperty
class DeVosSolarcell(SQSolarcell):
"""
DeVos single-junction solar cell
This class implements a solar cell as described by DeVos :cite:`9780198513926` Ch. 6.
A DeVosSolarcell is instantiated with a :class:`dict` having keys identical to the class's public data attributes. Each key's value must satisfy the constraints noted with the corresponding public data attribute. Dictionary values can be any numeric type or of type :class:`astropy.units.Quantity` so long as the units are compatible with what's listed.
"""
temp_planet = PhysicalProperty(unit = "K", lo_bnd = 0)
"""
Planet temperature > 0 [K]
"""
voltage = PhysicalProperty(unit = "V")
"""
Bias voltage [V]
"""
def calc_power_density(self):
"""
Solar cell power density
The output power density is calculated according to DeVos's :cite:`9780198513926` Eq. 6.4. Note that this expression assumes fully concentrated sunlight and is therefore not completely general.
This method returns values of type :class:`astropy.units.Quantity` with units of [W m^-2].
"""
electron_energy = constants.e.si * self.voltage
if self.bandgap == 0:
solar_flux = units.Quantity(0., "1/(m2*s)")
solar_cell_flux = units.Quantity(0., "1/(m2*s)")
else:
solar_flux = uibei(2, self.bandgap, self.temp_sun, 0)
solar_cell_flux = uibei(2, self.bandgap, self.temp_planet, electron_energy)
power_density = electron_energy * (solar_flux - solar_cell_flux)
return power_density.to("W/m^2")
|
<commit_before><commit_msg>Split DeVosSolarcell class into its own file<commit_after>
|
# -*- coding: utf-8 -*-
import numpy as np
from astropy import constants, units
from sympy.mpmath import polylog
from physicalproperty import PhysicalProperty, find_PhysicalProperty
class DeVosSolarcell(SQSolarcell):
"""
DeVos single-junction solar cell
This class implements a solar cell as described by DeVos :cite:`9780198513926` Ch. 6.
A DeVosSolarcell is instantiated with a :class:`dict` having keys identical to the class's public data attributes. Each key's value must satisfy the constraints noted with the corresponding public data attribute. Dictionary values can be any numeric type or of type :class:`astropy.units.Quantity` so long as the units are compatible with what's listed.
"""
temp_planet = PhysicalProperty(unit = "K", lo_bnd = 0)
"""
Planet temperature > 0 [K]
"""
voltage = PhysicalProperty(unit = "V")
"""
Bias voltage [V]
"""
def calc_power_density(self):
"""
Solar cell power density
The output power density is calculated according to DeVos's :cite:`9780198513926` Eq. 6.4. Note that this expression assumes fully concentrated sunlight and is therefore not completely general.
This method returns values of type :class:`astropy.units.Quantity` with units of [W m^-2].
"""
electron_energy = constants.e.si * self.voltage
if self.bandgap == 0:
solar_flux = units.Quantity(0., "1/(m2*s)")
solar_cell_flux = units.Quantity(0., "1/(m2*s)")
else:
solar_flux = uibei(2, self.bandgap, self.temp_sun, 0)
solar_cell_flux = uibei(2, self.bandgap, self.temp_planet, electron_energy)
power_density = electron_energy * (solar_flux - solar_cell_flux)
return power_density.to("W/m^2")
|
Split DeVosSolarcell class into its own file# -*- coding: utf-8 -*-
import numpy as np
from astropy import constants, units
from sympy.mpmath import polylog
from physicalproperty import PhysicalProperty, find_PhysicalProperty
class DeVosSolarcell(SQSolarcell):
"""
DeVos single-junction solar cell
This class implements a solar cell as described by DeVos :cite:`9780198513926` Ch. 6.
A DeVosSolarcell is instantiated with a :class:`dict` having keys identical to the class's public data attributes. Each key's value must satisfy the constraints noted with the corresponding public data attribute. Dictionary values can be any numeric type or of type :class:`astropy.units.Quantity` so long as the units are compatible with what's listed.
"""
temp_planet = PhysicalProperty(unit = "K", lo_bnd = 0)
"""
Planet temperature > 0 [K]
"""
voltage = PhysicalProperty(unit = "V")
"""
Bias voltage [V]
"""
def calc_power_density(self):
"""
Solar cell power density
The output power density is calculated according to DeVos's :cite:`9780198513926` Eq. 6.4. Note that this expression assumes fully concentrated sunlight and is therefore not completely general.
This method returns values of type :class:`astropy.units.Quantity` with units of [W m^-2].
"""
electron_energy = constants.e.si * self.voltage
if self.bandgap == 0:
solar_flux = units.Quantity(0., "1/(m2*s)")
solar_cell_flux = units.Quantity(0., "1/(m2*s)")
else:
solar_flux = uibei(2, self.bandgap, self.temp_sun, 0)
solar_cell_flux = uibei(2, self.bandgap, self.temp_planet, electron_energy)
power_density = electron_energy * (solar_flux - solar_cell_flux)
return power_density.to("W/m^2")
|
<commit_before><commit_msg>Split DeVosSolarcell class into its own file<commit_after># -*- coding: utf-8 -*-
import numpy as np
from astropy import constants, units
from sympy.mpmath import polylog
from physicalproperty import PhysicalProperty, find_PhysicalProperty
class DeVosSolarcell(SQSolarcell):
"""
DeVos single-junction solar cell
This class implements a solar cell as described by DeVos :cite:`9780198513926` Ch. 6.
A DeVosSolarcell is instantiated with a :class:`dict` having keys identical to the class's public data attributes. Each key's value must satisfy the constraints noted with the corresponding public data attribute. Dictionary values can be any numeric type or of type :class:`astropy.units.Quantity` so long as the units are compatible with what's listed.
"""
temp_planet = PhysicalProperty(unit = "K", lo_bnd = 0)
"""
Planet temperature > 0 [K]
"""
voltage = PhysicalProperty(unit = "V")
"""
Bias voltage [V]
"""
def calc_power_density(self):
"""
Solar cell power density
The output power density is calculated according to DeVos's :cite:`9780198513926` Eq. 6.4. Note that this expression assumes fully concentrated sunlight and is therefore not completely general.
This method returns values of type :class:`astropy.units.Quantity` with units of [W m^-2].
"""
electron_energy = constants.e.si * self.voltage
if self.bandgap == 0:
solar_flux = units.Quantity(0., "1/(m2*s)")
solar_cell_flux = units.Quantity(0., "1/(m2*s)")
else:
solar_flux = uibei(2, self.bandgap, self.temp_sun, 0)
solar_cell_flux = uibei(2, self.bandgap, self.temp_planet, electron_energy)
power_density = electron_energy * (solar_flux - solar_cell_flux)
return power_density.to("W/m^2")
|
|
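A hedged usage sketch for the record above; it assumes SQSolarcell and uibei are importable from the same package (they are referenced but not imported in this file) and that SQSolarcell supplies the bandgap and temp_sun attributes used by calc_power_density. The sample values and the eV unit for bandgap are assumptions:

cell = DeVosSolarcell({'temp_sun': 5762.,    # K, assumed SQSolarcell attribute
                       'temp_planet': 300.,  # K
                       'bandgap': 1.1,       # assumed eV
                       'voltage': 0.5})      # V
print(cell.calc_power_density())             # astropy Quantity in W/m^2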
a6f7316f7ad8065f77dc121318127b86f3431394
|
tests/test_extract.py
|
tests/test_extract.py
|
# -*- coding: utf-8 -*-
"""
test_extract
~~~~~~~~~~~~
Test data extraction on small document examples.
:copyright: Copyright 2016 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import unittest
from chemdataextractor import Document
from chemdataextractor.doc import Heading, Paragraph
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
unittest.util._MAX_LENGTH = 2000
class TestExtract(unittest.TestCase):
maxDiff = None
def test_melting_point_heading_salt(self):
"""Test extraction of melting point from a heading and paragraphs. Example taken from patent US06840965B2."""
d = Document(
Heading('D. Synthesis of 4-Amino-2-(3-thienyl)phenol Hydrochloride'),
Paragraph('3 g (13.5 mmoles) of 4-nitro-2-(3-thienyl)phenol was dissolved in 40 mL of ethanol and hydrogenated at 25° C. in the presence of 600 mg of a palladium—active carbon catalyst (10%). After the theoretically required amount of hydrogen had been absorbed, the catalyst was filtered off. Following concentration in a rotary evaporator, the reaction mixture was poured onto 20 mL of cold diethyl ether. The precipitated product was filtered off and dried.'),
Paragraph('This gave 1.95 g (75% of the theoretical) of 4-amino-2-(3-thienyl)phenol hydrochloride with a melting point of 130-132° C.')
)
expected = [
{'names': ['4-nitro-2-(3-thienyl)phenol']},
{'names': ['ethanol']},
{'names': ['palladium']},
{'names': ['carbon']},
{'names': ['hydrogen']},
{'names': ['diethyl ether']},
{'melting_points': [{'units': '°C', 'value': '130-132'}], 'names': ['4-Amino-2-(3-thienyl)phenol Hydrochloride', '4-amino-2-(3-thienyl)phenol hydrochloride'], 'roles': ['product']}
]
self.assertEqual(expected, d.records.serialize())
if __name__ == '__main__':
unittest.main()
|
Add more general extraction tests
|
Add more general extraction tests
|
Python
|
mit
|
mcs07/ChemDataExtractor,mcs07/ChemDataExtractor,mcs07/ChemDataExtractor
|
Add more general extraction tests
|
# -*- coding: utf-8 -*-
"""
test_extract
~~~~~~~~~~~~
Test data extraction on small document examples.
:copyright: Copyright 2016 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import unittest
from chemdataextractor import Document
from chemdataextractor.doc import Heading, Paragraph
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
unittest.util._MAX_LENGTH = 2000
class TestExtract(unittest.TestCase):
maxDiff = None
def test_melting_point_heading_salt(self):
"""Test extraction of melting point from a heading and paragraphs. Example taken from patent US06840965B2."""
d = Document(
Heading('D. Synthesis of 4-Amino-2-(3-thienyl)phenol Hydrochloride'),
Paragraph('3 g (13.5 mmoles) of 4-nitro-2-(3-thienyl)phenol was dissolved in 40 mL of ethanol and hydrogenated at 25° C. in the presence of 600 mg of a palladium—active carbon catalyst (10%). After the theoretically required amount of hydrogen had been absorbed, the catalyst was filtered off. Following concentration in a rotary evaporator, the reaction mixture was poured onto 20 mL of cold diethyl ether. The precipitated product was filtered off and dried.'),
Paragraph('This gave 1.95 g (75% of the theoretical) of 4-amino-2-(3-thienyl)phenol hydrochloride with a melting point of 130-132° C.')
)
expected = [
{'names': ['4-nitro-2-(3-thienyl)phenol']},
{'names': ['ethanol']},
{'names': ['palladium']},
{'names': ['carbon']},
{'names': ['hydrogen']},
{'names': ['diethyl ether']},
{'melting_points': [{'units': '°C', 'value': '130-132'}], 'names': ['4-Amino-2-(3-thienyl)phenol Hydrochloride', '4-amino-2-(3-thienyl)phenol hydrochloride'], 'roles': ['product']}
]
self.assertEqual(expected, d.records.serialize())
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add more general extraction tests<commit_after>
|
# -*- coding: utf-8 -*-
"""
test_extract
~~~~~~~~~~~~
Test data extraction on small document examples.
:copyright: Copyright 2016 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import unittest
from chemdataextractor import Document
from chemdataextractor.doc import Heading, Paragraph
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
unittest.util._MAX_LENGTH = 2000
class TestExtract(unittest.TestCase):
maxDiff = None
def test_melting_point_heading_salt(self):
"""Test extraction of melting point from a heading and paragraphs. Example taken from patent US06840965B2."""
d = Document(
Heading('D. Synthesis of 4-Amino-2-(3-thienyl)phenol Hydrochloride'),
Paragraph('3 g (13.5 mmoles) of 4-nitro-2-(3-thienyl)phenol was dissolved in 40 mL of ethanol and hydrogenated at 25° C. in the presence of 600 mg of a palladium—active carbon catalyst (10%). After the theoretically required amount of hydrogen had been absorbed, the catalyst was filtered off. Following concentration in a rotary evaporator, the reaction mixture was poured onto 20 mL of cold diethyl ether. The precipitated product was filtered off and dried.'),
Paragraph('This gave 1.95 g (75% of the theoretical) of 4-amino-2-(3-thienyl)phenol hydrochloride with a melting point of 130-132° C.')
)
expected = [
{'names': ['4-nitro-2-(3-thienyl)phenol']},
{'names': ['ethanol']},
{'names': ['palladium']},
{'names': ['carbon']},
{'names': ['hydrogen']},
{'names': ['diethyl ether']},
{'melting_points': [{'units': '°C', 'value': '130-132'}], 'names': ['4-Amino-2-(3-thienyl)phenol Hydrochloride', '4-amino-2-(3-thienyl)phenol hydrochloride'], 'roles': ['product']}
]
self.assertEqual(expected, d.records.serialize())
if __name__ == '__main__':
unittest.main()
|
Add more general extraction tests# -*- coding: utf-8 -*-
"""
test_extract
~~~~~~~~~~~~
Test data extraction on small document examples.
:copyright: Copyright 2016 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import unittest
from chemdataextractor import Document
from chemdataextractor.doc import Heading, Paragraph
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
unittest.util._MAX_LENGTH = 2000
class TestExtract(unittest.TestCase):
maxDiff = None
def test_melting_point_heading_salt(self):
"""Test extraction of melting point from a heading and paragraphs. Example taken from patent US06840965B2."""
d = Document(
Heading('D. Synthesis of 4-Amino-2-(3-thienyl)phenol Hydrochloride'),
Paragraph('3 g (13.5 mmoles) of 4-nitro-2-(3-thienyl)phenol was dissolved in 40 mL of ethanol and hydrogenated at 25° C. in the presence of 600 mg of a palladium—active carbon catalyst (10%). After the theoretically required amount of hydrogen had been absorbed, the catalyst was filtered off. Following concentration in a rotary evaporator, the reaction mixture was poured onto 20 mL of cold diethyl ether. The precipitated product was filtered off and dried.'),
Paragraph('This gave 1.95 g (75% of the theoretical) of 4-amino-2-(3-thienyl)phenol hydrochloride with a melting point of 130-132° C.')
)
expected = [
{'names': ['4-nitro-2-(3-thienyl)phenol']},
{'names': ['ethanol']},
{'names': ['palladium']},
{'names': ['carbon']},
{'names': ['hydrogen']},
{'names': ['diethyl ether']},
{'melting_points': [{'units': '°C', 'value': '130-132'}], 'names': ['4-Amino-2-(3-thienyl)phenol Hydrochloride', '4-amino-2-(3-thienyl)phenol hydrochloride'], 'roles': ['product']}
]
self.assertEqual(expected, d.records.serialize())
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add more general extraction tests<commit_after># -*- coding: utf-8 -*-
"""
test_extract
~~~~~~~~~~~~
Test data extraction on small document examples.
:copyright: Copyright 2016 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import unittest
from chemdataextractor import Document
from chemdataextractor.doc import Heading, Paragraph
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
unittest.util._MAX_LENGTH = 2000
class TestExtract(unittest.TestCase):
maxDiff = None
def test_melting_point_heading_salt(self):
"""Test extraction of melting point from a heading and paragraphs. Example taken from patent US06840965B2."""
d = Document(
Heading('D. Synthesis of 4-Amino-2-(3-thienyl)phenol Hydrochloride'),
Paragraph('3 g (13.5 mmoles) of 4-nitro-2-(3-thienyl)phenol was dissolved in 40 mL of ethanol and hydrogenated at 25° C. in the presence of 600 mg of a palladium—active carbon catalyst (10%). After the theoretically required amount of hydrogen had been absorbed, the catalyst was filtered off. Following concentration in a rotary evaporator, the reaction mixture was poured onto 20 mL of cold diethyl ether. The precipitated product was filtered off and dried.'),
Paragraph('This gave 1.95 g (75% of the theoretical) of 4-amino-2-(3-thienyl)phenol hydrochloride with a melting point of 130-132° C.')
)
expected = [
{'names': ['4-nitro-2-(3-thienyl)phenol']},
{'names': ['ethanol']},
{'names': ['palladium']},
{'names': ['carbon']},
{'names': ['hydrogen']},
{'names': ['diethyl ether']},
{'melting_points': [{'units': '°C', 'value': '130-132'}], 'names': ['4-Amino-2-(3-thienyl)phenol Hydrochloride', '4-amino-2-(3-thienyl)phenol hydrochloride'], 'roles': ['product']}
]
self.assertEqual(expected, d.records.serialize())
if __name__ == '__main__':
unittest.main()
|
|
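A minimal sketch of the extraction API the test above drives; the input text is illustrative:

from chemdataextractor import Document
from chemdataextractor.doc import Paragraph

doc = Document(Paragraph('The product melted at 130-132° C.'))
print(doc.records.serialize())  # list of dicts shaped like `expected` above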
f8350d23dae60aa3e100e1249275923b5933d1ac
|
tests/functional/test_h2_required.py
|
tests/functional/test_h2_required.py
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_is, assert_in
from botocore.session import get_session
_H2_REQUIRED = object()
# Service names to list of known HTTP 2 operations
_KNOWN_SERVICES = {
'kinesis': ['SubscribeToShard'],
}
def test_all_uses_of_h2_are_known():
session = get_session()
loader = session.get_component('data_loader')
services = loader.list_available_services('service-2')
for service in services:
service_model = session.get_service_model(service)
h2_config = service_model.metadata.get('protocolSettings', {}).get('h2')
if h2_config == 'required':
yield _assert_h2_service_is_known, service
elif h2_config == 'eventstream':
for operation in service_model.operation_names:
operation_model = service_model.operation_model(operation)
if operation_model.has_event_stream_output:
yield _assert_h2_operation_is_known, service, operation
def _assert_h2_service_is_known(service):
# Validates that a service that requires HTTP 2 for all operations is known
message = 'Found unknown HTTP 2 service: %s' % service
assert_is(_KNOWN_SERVICES.get(service), _H2_REQUIRED, message)
def _assert_h2_operation_is_known(service, operation):
# Validates that an operation that requires HTTP 2 is known
known_operations = _KNOWN_SERVICES.get(service, [])
message = 'Found unknown HTTP 2 operation: %s.%s' % (service, operation)
assert_in(operation, known_operations, message)
|
Add validation for known h2 services
|
Add validation for known h2 services
|
Python
|
apache-2.0
|
pplu/botocore,boto/botocore
|
Add validation for known h2 services
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_is, assert_in
from botocore.session import get_session
_H2_REQUIRED = object()
# Service names to list of known HTTP 2 operations
_KNOWN_SERVICES = {
'kinesis': ['SubscribeToShard'],
}
def test_all_uses_of_h2_are_known():
session = get_session()
loader = session.get_component('data_loader')
services = loader.list_available_services('service-2')
for service in services:
service_model = session.get_service_model(service)
h2_config = service_model.metadata.get('protocolSettings', {}).get('h2')
if h2_config == 'required':
yield _assert_h2_service_is_known, service
elif h2_config == 'eventstream':
for operation in service_model.operation_names:
operation_model = service_model.operation_model(operation)
if operation_model.has_event_stream_output:
yield _assert_h2_operation_is_known, service, operation
def _assert_h2_service_is_known(service):
# Validates that a service that requires HTTP 2 for all operations is known
message = 'Found unknown HTTP 2 service: %s' % service
assert_is(_KNOWN_SERVICES.get(service), _H2_REQUIRED, message)
def _assert_h2_operation_is_known(service, operation):
# Validates that an operation that requires HTTP 2 is known
known_operations = _KNOWN_SERVICES.get(service, [])
message = 'Found unknown HTTP 2 operation: %s.%s' % (service, operation)
assert_in(operation, known_operations, message)
|
<commit_before><commit_msg>Add validation for known h2 services<commit_after>
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_is, assert_in
from botocore.session import get_session
_H2_REQUIRED = object()
# Service names to list of known HTTP 2 operations
_KNOWN_SERVICES = {
'kinesis': ['SubscribeToShard'],
}
def test_all_uses_of_h2_are_known():
session = get_session()
loader = session.get_component('data_loader')
services = loader.list_available_services('service-2')
for service in services:
service_model = session.get_service_model(service)
h2_config = service_model.metadata.get('protocolSettings', {}).get('h2')
if h2_config == 'required':
yield _assert_h2_service_is_known, service
elif h2_config == 'eventstream':
for operation in service_model.operation_names:
operation_model = service_model.operation_model(operation)
if operation_model.has_event_stream_output:
yield _assert_h2_operation_is_known, service, operation
def _assert_h2_service_is_known(service):
# Validates that a service that requires HTTP 2 for all operations is known
message = 'Found unknown HTTP 2 service: %s' % service
assert_is(_KNOWN_SERVICES.get(service), _H2_REQUIRED, message)
def _assert_h2_operation_is_known(service, operation):
# Validates that an operation that requires HTTP 2 is known
known_operations = _KNOWN_SERVICES.get(service, [])
message = 'Found unknown HTTP 2 operation: %s.%s' % (service, operation)
assert_in(operation, known_operations, message)
|
Add validation for known h2 services# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_is, assert_in
from botocore.session import get_session
_H2_REQUIRED = object()
# Service names to list of known HTTP 2 operations
_KNOWN_SERVICES = {
'kinesis': ['SubscribeToShard'],
}
def test_all_uses_of_h2_are_known():
session = get_session()
loader = session.get_component('data_loader')
services = loader.list_available_services('service-2')
for service in services:
service_model = session.get_service_model(service)
h2_config = service_model.metadata.get('protocolSettings', {}).get('h2')
if h2_config == 'required':
yield _assert_h2_service_is_known, service
elif h2_config == 'eventstream':
for operation in service_model.operation_names:
operation_model = service_model.operation_model(operation)
if operation_model.has_event_stream_output:
yield _assert_h2_operation_is_known, service, operation
def _assert_h2_service_is_known(service):
# Validates that a service that requires HTTP 2 for all operations is known
message = 'Found unknown HTTP 2 service: %s' % service
assert_is(_KNOWN_SERVICES.get(service), _H2_REQUIRED, message)
def _assert_h2_operation_is_known(service, operation):
# Validates that an operation that requires HTTP 2 is known
known_operations = _KNOWN_SERVICES.get(service, [])
message = 'Found unknown HTTP 2 operation: %s.%s' % (service, operation)
assert_in(operation, known_operations, message)
|
<commit_before><commit_msg>Add validation for known h2 services<commit_after># Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_is, assert_in
from botocore.session import get_session
_H2_REQUIRED = object()
# Service names to list of known HTTP 2 operations
_KNOWN_SERVICES = {
'kinesis': ['SubscribeToShard'],
}
def test_all_uses_of_h2_are_known():
session = get_session()
loader = session.get_component('data_loader')
services = loader.list_available_services('service-2')
for service in services:
service_model = session.get_service_model(service)
h2_config = service_model.metadata.get('protocolSettings', {}).get('h2')
if h2_config == 'required':
yield _assert_h2_service_is_known, service
elif h2_config == 'eventstream':
for operation in service_model.operation_names:
operation_model = service_model.operation_model(operation)
if operation_model.has_event_stream_output:
yield _assert_h2_operation_is_known, service, operation
def _assert_h2_service_is_known(service):
# Validates that a service that requires HTTP 2 for all operations is known
message = 'Found unknown HTTP 2 service: %s' % service
assert_is(_KNOWN_SERVICES.get(service), _H2_REQUIRED, message)
def _assert_h2_operation_is_known(service, operation):
# Validates that an operation that requires HTTP 2 is known
known_operations = _KNOWN_SERVICES.get(service, [])
message = 'Found unknown HTTP 2 operation: %s.%s' % (service, operation)
assert_in(operation, known_operations, message)
|
|
064e834ed602ba14d492f64468c538967b5753dc
|
zoe_master/tests/state_manager_test.py
|
zoe_master/tests/state_manager_test.py
|
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_master.state.application import ApplicationDescription, ServiceDescription, ServiceEndpointDescription
def test_application():
ApplicationDescription()
def test_process():
ServiceDescription()
def test_process_endpoint():
ServiceEndpointDescription()
|
Add a new test for the state manager
|
Add a new test for the state manager
|
Python
|
apache-2.0
|
DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe
|
Add a new test for the state manager
|
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_master.state.application import ApplicationDescription, ServiceDescription, ServiceEndpointDescription
def test_application():
ApplicationDescription()
def test_process():
ServiceDescription()
def test_process_endpoint():
ServiceEndpointDescription()
|
<commit_before><commit_msg>Add a new test for the state manager<commit_after>
|
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_master.state.application import ApplicationDescription, ServiceDescription, ServiceEndpointDescription
def test_application():
ApplicationDescription()
def test_process():
ServiceDescription()
def test_process_endpoint():
ServiceEndpointDescription()
|
Add a new test for the state manager# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_master.state.application import ApplicationDescription, ServiceDescription, ServiceEndpointDescription
def test_application():
ApplicationDescription()
def test_process():
ServiceDescription()
def test_process_endpoint():
ServiceEndpointDescription()
|
<commit_before><commit_msg>Add a new test for the state manager<commit_after># Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zoe_master.state.application import ApplicationDescription, ServiceDescription, ServiceEndpointDescription
def test_application():
ApplicationDescription()
def test_process():
ServiceDescription()
def test_process_endpoint():
ServiceEndpointDescription()
|
|
3c0a992cbcc54c15835d2d95cf4f7e94f4764af0
|
digestive/__init__.py
|
digestive/__init__.py
|
from os import path
class Source:
"""
Data source context manager and reader.
"""
def __init__(self, source):
self.source = source
self.fd = None
def __len__(self):
return path.getsize(self.source)
def __enter__(self):
self.open()
return self
def open(self):
# open named source in binary mode for reading
self.fd = open(self.source, 'rb')
def readinto(self, buffer):
return self.fd.readinto(buffer)
def blocks(self, block_size=1 << 20):
"""
Generator for blocks of at most block_size read from this source.
:param block_size: Maximum number of bytes to read at a time.
:return: Data block generator.
"""
current, swap = memoryview(bytearray(block_size)), memoryview(bytearray(block_size))
num_read = self.readinto(current)
while num_read:
# yield the current block, excluding possible stale bytes not read
yield current[:num_read]
# swap buffers, allowing next block to be read into different buffer
current, swap = swap, current
num_read = self.readinto(current)
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self):
self.fd.close()
class Sink:
"""
Base class for digesting data in chunks.
"""
def __init__(self, name=None):
self.name = name
def update(self, data):
"""
Updates this digest with a chunk of data.
:param data: Chunk of data.
"""
pass
def digest(self):
"""
Creates the result of this digest and returns it.
:return: The result of this digest as a string.
"""
pass
|
Define Source and Sink classes
|
Define Source and Sink classes
|
Python
|
isc
|
akaIDIOT/Digestive
|
Define Source and Sink classes
|
from os import path
class Source:
"""
Data source context manager and reader.
"""
def __init__(self, source):
self.source = source
self.fd = None
def __len__(self):
return path.getsize(self.source)
def __enter__(self):
self.open()
return self
def open(self):
# open named source in binary mode for reading
self.fd = open(self.source, 'rb')
def readinto(self, buffer):
return self.fd.readinto(buffer)
def blocks(self, block_size=1 << 20):
"""
Generator for blocks of at most block_size read from this source.
:param block_size: Maximum number of bytes to read at a time.
:return: Data block generator.
"""
current, swap = memoryview(bytearray(block_size)), memoryview(bytearray(block_size))
num_read = self.readinto(current)
while num_read:
# yield the current block, excluding possible stale bytes not read
yield current[:num_read]
# swap buffers, allowing next block to be read into different buffer
current, swap = swap, current
num_read = self.readinto(current)
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self):
self.fd.close()
class Sink:
"""
Base class for digesting data in chunks.
"""
def __init__(self, name=None):
self.name = name
def update(self, data):
"""
Updates this digest with a chunk of data.
:param data: Chunk of data.
"""
pass
def digest(self):
"""
Creates the result of this digest and returns it.
:return: The result of this digest as a string.
"""
pass
|
<commit_before><commit_msg>Define Source and Sink classes<commit_after>
|
from os import path
class Source:
"""
Data source context manager and reader.
"""
def __init__(self, source):
self.source = source
self.fd = None
def __len__(self):
return path.getsize(self.source)
def __enter__(self):
self.open()
return self
def open(self):
# open named source in binary mode for reading
self.fd = open(self.source, 'rb')
def readinto(self, buffer):
return self.fd.readinto(buffer)
def blocks(self, block_size=1 << 20):
"""
Generator for blocks of at most block_size read from this source.
:param block_size: Maximum number of bytes to read at a time.
:return: Data block generator.
"""
current, swap = memoryview(bytearray(block_size)), memoryview(bytearray(block_size))
num_read = self.readinto(current)
while num_read:
# yield the current block, excluding possible stale bytes not read
yield current[:num_read]
# swap buffers, allowing next block to be read into different buffer
current, swap = swap, current
num_read = self.readinto(current)
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self):
self.fd.close()
class Sink:
"""
Base class for digesting data in chunks.
"""
def __init__(self, name=None):
self.name = name
def update(self, data):
"""
Updates this digest with a chunk of data.
:param data: Chunk of data.
"""
pass
def digest(self):
"""
Creates the result of this digest and returns it.
:return: The result of this digest as a string.
"""
pass
|
Define Source and Sink classesfrom os import path
class Source:
"""
Data source context manager and reader.
"""
def __init__(self, source):
self.source = source
self.fd = None
def __len__(self):
return path.getsize(self.source)
def __enter__(self):
self.open()
return self
def open(self):
# open named source in binary mode for reading
self.fd = open(self.source, 'rb')
def readinto(self, buffer):
return self.fd.readinto(buffer)
def blocks(self, block_size=1 << 20):
"""
Generator for blocks of at most block_size read from this source.
:param block_size: Maximum number of bytes to read at a time.
:return: Data block generator.
"""
current, swap = memoryview(bytearray(block_size)), memoryview(bytearray(block_size))
num_read = self.readinto(current)
while num_read:
# yield the current block, excluding possible stale bytes not read
yield current[:num_read]
# swap buffers, allowing next block to be read into different buffer
current, swap = swap, current
num_read = self.readinto(current)
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self):
self.fd.close()
class Sink:
"""
Base class for digesting data in chunks.
"""
def __init__(self, name=None):
self.name = name
def update(self, data):
"""
Updates this digest with a chunk of data.
:param data: Chunk of data.
"""
pass
def digest(self):
"""
Creates the result of this digest and returns it.
:return: The result of this digest as a string.
"""
pass
|
<commit_before><commit_msg>Define Source and Sink classes<commit_after>from os import path
class Source:
"""
Data source context manager and reader.
"""
def __init__(self, source):
self.source = source
self.fd = None
def __len__(self):
return path.getsize(self.source)
def __enter__(self):
self.open()
return self
def open(self):
# open named source in binary mode for reading
self.fd = open(self.source, 'rb')
def readinto(self, buffer):
return self.fd.readinto(buffer)
def blocks(self, block_size=1 << 20):
"""
Generator for blocks of at most block_size read from this source.
:param block_size: Maximum number of bytes to read at a time.
:return: Data block generator.
"""
current, swap = memoryview(bytearray(block_size)), memoryview(bytearray(block_size))
num_read = self.readinto(current)
while num_read:
# yield the current block, excluding possible stale bytes not read
yield current[:num_read]
# swap buffers, allowing next block to be read into different buffer
current, swap = swap, current
num_read = self.readinto(current)
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self):
self.fd.close()
class Sink:
"""
Base class for digesting data in chunks.
"""
def __init__(self, name=None):
self.name = name
def update(self, data):
"""
Updates this digest with a chunk of data.
:param data: Chunk of data.
"""
pass
def digest(self):
"""
Creates the result of this digest and returns it.
:return: The result of this digest as a string.
"""
pass
|
|
8803e65d61c590b25effc954287944b5d7aa5787
|
spec/web_service_spec.py
|
spec/web_service_spec.py
|
from expects import expect, equal
from primestg.report import Report
import responses
import requests
from primestg.service import Service
from zeep.transports import Transport
with description('Web services run'):
with before.all:
self.s = Service()
with it('asking for S02 report'):
with responses.RequestsMock() as rsps:
rsps.add(responses.POST, 'http://cct.gisce.lan:8080/',
body='{}', status=200,
content_type='application/json')
resp = self.s.get_daily_incremental('ZIV0040318130',
'20170610010000',
'20170611000000')
assert resp.json() == {"error": "not found"}
|
Add first version of test w/responses
|
Add first version of test w/responses
|
Python
|
agpl-3.0
|
gisce/primestg
|
Add first version of test w/responses
|
from expects import expect, equal
from primestg.report import Report
import responses
import requests
from primestg.service import Service
from zeep.transports import Transport
with description('Web services run'):
with before.all:
self.s = Service()
with it('asking for S02 report'):
with responses.RequestsMock() as rsps:
rsps.add(responses.POST, 'http://cct.gisce.lan:8080/',
body='{}', status=200,
content_type='application/json')
resp = self.s.get_daily_incremental('ZIV0040318130',
'20170610010000',
'20170611000000')
assert resp.json() == {"error": "not found"}
|
<commit_before><commit_msg>Add first version of test w/responses<commit_after>
|
from expects import expect, equal
from primestg.report import Report
import responses
import requests
from primestg.service import Service
from zeep.transports import Transport
with description('Web services run'):
with before.all:
self.s = Service()
with it('asking for S02 report'):
with responses.RequestsMock() as rsps:
rsps.add(responses.POST, 'http://cct.gisce.lan:8080/',
body='{}', status=200,
content_type='application/json')
resp = self.s.get_daily_incremental('ZIV0040318130',
'20170610010000',
'20170611000000')
assert resp.json() == {"error": "not found"}
|
Add first version of test w/responsesfrom expects import expect, equal
from primestg.report import Report
import responses
import requests
from primestg.service import Service
from zeep.transports import Transport
with description('Web services run'):
with before.all:
self.s = Service()
with it('asking for S02 report'):
with responses.RequestsMock() as rsps:
rsps.add(responses.POST, 'http://cct.gisce.lan:8080/',
body='{}', status=200,
content_type='application/json')
resp = self.s.get_daily_incremental('ZIV0040318130',
'20170610010000',
'20170611000000')
assert resp.json() == {"error": "not found"}
|
<commit_before><commit_msg>Add first version of test w/responses<commit_after>from expects import expect, equal
from primestg.report import Report
import responses
import requests
from primestg.service import Service
from zeep.transports import Transport
with description('Web services run'):
with before.all:
self.s = Service()
with it('asking for S02 report'):
with responses.RequestsMock() as rsps:
rsps.add(responses.POST, 'http://cct.gisce.lan:8080/',
body='{}', status=200,
content_type='application/json')
resp = self.s.get_daily_incremental('ZIV0040318130',
'20170610010000',
'20170611000000')
assert resp.json() == {"error": "not found"}
|
|
29f8e40a26f1774615ea4fceb6ea4a8990dcc7e8
|
example/query_bugs.py
|
example/query_bugs.py
|
#!/usr/bin/env python
import bugsy
from pdb import set_trace as bp
# This example demonstrates querying bugs with an API key for product "Foo"
bugzilla = bugsy.Bugsy(username='REDACTED', api_key='REDACTED')
bugs = bugzilla.search_for\
.product('Foo')\
.search()
for bug in bugs:
print(str(bug.id) + " " + bug.summary)
|
Add a bug query example
|
Add a bug query example
|
Python
|
apache-2.0
|
AutomatedTester/Bugsy
|
Add a bug query example
|
#!/usr/bin/env python
import bugsy
from pdb import set_trace as bp
# This example demonstrates querying bugs with an API key for product "Foo"
bugzilla = bugsy.Bugsy(username='REDACTED', api_key='REDACTED')
bugs = bugzilla.search_for\
.product('Foo')\
.search()
for bug in bugs:
print(str(bug.id) + " " + bug.summary)
|
<commit_before><commit_msg>Add a bug query example<commit_after>
|
#!/usr/bin/env python
import bugsy
from pdb import set_trace as bp
# This example demonstrates querying bugs with an API key for product "Foo"
bugzilla = bugsy.Bugsy(username='REDACTED', api_key='REDACTED')
bugs = bugzilla.search_for\
.product('Foo')\
.search()
for bug in bugs:
print(str(bug.id) + " " + bug.summary)
|
Add a bug query example#!/usr/bin/env python
import bugsy
from pdb import set_trace as bp
# This example demonstrates querying bugs with an API key for product "Foo"
bugzilla = bugsy.Bugsy(username='REDACTED', api_key='REDACTED')
bugs = bugzilla.search_for\
.product('Foo')\
.search()
for bug in bugs:
print(str(bug.id) + " " + bug.summary)
|
<commit_before><commit_msg>Add a bug query example<commit_after>#!/usr/bin/env python
import bugsy
from pdb import set_trace as bp
# This example demonstrates querying bugs with an API key for product "Foo"
bugzilla = bugsy.Bugsy(username='REDACTED', api_key='REDACTED')
bugs = bugzilla.search_for\
.product('Foo')\
.search()
for bug in bugs:
print(str(bug.id) + " " + bug.summary)
|
|
e70a73b8c365329f7a8cf86ad527b12358752266
|
sa-extract/sa2cdec.py
|
sa-extract/sa2cdec.py
|
#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format
|
Python
|
apache-2.0
|
pks/cdec-dtrain-legacy,kho/mr-cdec,kho/mr-cdec,pks/cdec-dtrain-legacy,pks/cdec-dtrain-legacy,kho/mr-cdec,kho/mr-cdec,pks/cdec-dtrain-legacy,kho/mr-cdec,pks/cdec-dtrain-legacy,pks/cdec-dtrain-legacy,kho/mr-cdec
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format
|
#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
<commit_before><commit_msg>Add script to convert from sa-extract's unnamed format to cdec's more readable named format<commit_after>
|
#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
Add script to convert from sa-extract's unnamed format to cdec's more readable named format#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|
<commit_before><commit_msg>Add script to convert from sa-extract's unnamed format to cdec's more readable named format<commit_after>#!/usr/bin/env python
import sys
featNames = [ line.strip() for line in open(sys.argv[1]) if not line.startswith('#') ]
for line in sys.stdin:
try:
(lhs, src, tgt, feats, align) = line.strip("\n").split(' ||| ')
except:
print >>sys.stderr, 'WARNING: No alignments:', line
try:
(lhs, src, tgt, feats) = line.strip().split(' ||| ')
align = ''
except:
print >>sys.stderr, "ERROR: Malformed line:", line
raise
featValues = feats.split(' ')
namedFeats = ' '.join( name+"="+value for (name, value) in zip(featNames, featValues) )
print " ||| ".join( (lhs, src, tgt, namedFeats, align) )
|