commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
489ac862bf5efc70d1af3933d1c518489a0e95a6 | tests/test_pubtator.py | tests/test_pubtator.py |
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 17
if __name__ == '__main__':
test_pubtator()
|
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 39
if __name__ == '__main__':
test_pubtator()
| Update pubtator test given new pubtator output | Update pubtator test given new pubtator output
| Python | mit | jakelever/kindred,jakelever/kindred |
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 17
if __name__ == '__main__':
test_pubtator()
Update pubtator test given new pubtator output |
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 39
if __name__ == '__main__':
test_pubtator()
| <commit_before>
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 17
if __name__ == '__main__':
test_pubtator()
<commit_msg>Update pubtator test given new pubtator output<commit_after> |
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 39
if __name__ == '__main__':
test_pubtator()
|
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 17
if __name__ == '__main__':
test_pubtator()
Update pubtator test given new pubtator output
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 39
if __name__ == '__main__':
test_pubtator()
| <commit_before>
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 17
if __name__ == '__main__':
test_pubtator()
<commit_msg>Update pubtator test given new pubtator output<commit_after>
import kindred
def test_pubtator():
data = kindred.pubtator.load([19894120,19894121])
assert isinstance(data,list)
for d in data:
assert isinstance(d,kindred.RelationData)
fileCount = len(data)
entityCount = sum([ len(d.getEntities()) for d in data ])
relationCount = sum([ len(d.getRelations()) for d in data ])
assert fileCount == 2
assert relationCount == 0
assert entityCount == 39
if __name__ == '__main__':
test_pubtator()
|
11103afa4a46cc1835f1479651bcd7c808d6a33c | sdks/python/apache_beam/runners/api/__init__.py | sdks/python/apache_beam/runners/api/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Checked in to avoid protoc dependency for Python development.
Regenerate files with::
protoc -I../common/runner-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
../common/runner-api/src/main/proto/*.proto
protoc -I../common/{fn,runner}-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
--grpc_python_out=apache_beam/runners/api/ \
../common/fn-api/src/main/proto/*.proto
"""
| Add instructions to regenerate Python proto wrappers. | Add instructions to regenerate Python proto wrappers.
| Python | apache-2.0 | apache/beam,staslev/incubator-beam,jbonofre/beam,iemejia/incubator-beam,lukecwik/incubator-beam,rangadi/beam,wtanaka/beam,rangadi/beam,markflyhigh/incubator-beam,chamikaramj/beam,manuzhang/beam,tgroh/incubator-beam,wangyum/beam,charlesccychen/beam,RyanSkraba/beam,manuzhang/beam,eljefe6a/incubator-beam,apache/beam,charlesccychen/beam,amarouni/incubator-beam,rangadi/incubator-beam,charlesccychen/incubator-beam,vikkyrk/incubator-beam,staslev/incubator-beam,robertwb/incubator-beam,chamikaramj/beam,dhalperi/beam,sammcveety/incubator-beam,lukecwik/incubator-beam,apache/beam,chamikaramj/beam,rangadi/beam,apache/beam,markflyhigh/incubator-beam,peihe/incubator-beam,charlesccychen/beam,wangyum/beam,lukecwik/incubator-beam,wangyum/beam,robertwb/incubator-beam,wtanaka/beam,mxm/incubator-beam,markflyhigh/incubator-beam,robertwb/incubator-beam,mxm/incubator-beam,lukecwik/incubator-beam,RyanSkraba/beam,robertwb/incubator-beam,apache/beam,robertwb/incubator-beam,manuzhang/beam,lukecwik/incubator-beam,jbonofre/incubator-beam,eljefe6a/incubator-beam,lukecwik/incubator-beam,dhalperi/beam,markflyhigh/incubator-beam,RyanSkraba/beam,robertwb/incubator-beam,wangyum/beam,chamikaramj/beam,chamikaramj/beam,jbonofre/incubator-beam,vikkyrk/incubator-beam,vikkyrk/incubator-beam,staslev/beam,jbonofre/beam,staslev/beam,tgroh/beam,RyanSkraba/beam,rangadi/incubator-beam,markflyhigh/incubator-beam,jbonofre/beam,sammcveety/incubator-beam,lukecwik/incubator-beam,iemejia/incubator-beam,markflyhigh/incubator-beam,amarouni/incubator-beam,tgroh/beam,eljefe6a/incubator-beam,staslev/beam,apache/beam,rangadi/beam,RyanSkraba/beam,markflyhigh/incubator-beam,apache/beam,manuzhang/incubator-beam,lukecwik/incubator-beam,dhalperi/beam,lukecwik/incubator-beam,tgroh/beam,rangadi/beam,dhalperi/incubator-beam,charlesccychen/beam,peihe/incubator-beam,dhalperi/incubator-beam,charlesccychen/beam,sammcveety/incubator-beam,chamikaramj/beam,charlesccychen/incubator-beam,robertwb/incubator-beam,peihe/
incubator-beam,manuzhang/incubator-beam,yk5/beam,tgroh/beam,apache/beam,rangadi/incubator-beam,apache/beam,tgroh/incubator-beam,chamikaramj/beam,chamikaramj/beam,RyanSkraba/beam,lukecwik/incubator-beam,yk5/beam,rangadi/beam,wtanaka/beam,chamikaramj/beam,charlesccychen/beam,charlesccychen/beam,robertwb/incubator-beam,rangadi/beam,jbonofre/beam,chamikaramj/beam,apache/beam,charlesccychen/incubator-beam,robertwb/incubator-beam,apache/beam,RyanSkraba/beam,robertwb/incubator-beam,yk5/beam | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Add instructions to regenerate Python proto wrappers. | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Checked in to avoid protoc dependency for Python development.
Regenerate files with::
protoc -I../common/runner-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
../common/runner-api/src/main/proto/*.proto
protoc -I../common/{fn,runner}-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
--grpc_python_out=apache_beam/runners/api/ \
../common/fn-api/src/main/proto/*.proto
"""
| <commit_before>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
<commit_msg>Add instructions to regenerate Python proto wrappers.<commit_after> | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Checked in to avoid protoc dependency for Python development.
Regenerate files with::
protoc -I../common/runner-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
../common/runner-api/src/main/proto/*.proto
protoc -I../common/{fn,runner}-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
--grpc_python_out=apache_beam/runners/api/ \
../common/fn-api/src/main/proto/*.proto
"""
| #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Add instructions to regenerate Python proto wrappers.#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Checked in to avoid protoc dependency for Python development.
Regenerate files with::
protoc -I../common/runner-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
../common/runner-api/src/main/proto/*.proto
protoc -I../common/{fn,runner}-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
--grpc_python_out=apache_beam/runners/api/ \
../common/fn-api/src/main/proto/*.proto
"""
| <commit_before>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
<commit_msg>Add instructions to regenerate Python proto wrappers.<commit_after>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Checked in to avoid protoc dependency for Python development.
Regenerate files with::
protoc -I../common/runner-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
../common/runner-api/src/main/proto/*.proto
protoc -I../common/{fn,runner}-api/src/main/proto/ \
--python_out=apache_beam/runners/api/ \
--grpc_python_out=apache_beam/runners/api/ \
../common/fn-api/src/main/proto/*.proto
"""
|
7176ec5d4abe678d8f0d01baeacf4dc78204b18f | tests/integration/modules/grains.py | tests/integration/modules/grains.py | '''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
| '''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'os_family',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
| Add test to test if os_family grain is provided. | Add test to test if os_family grain is provided.
Corey Quinn reported a issue where __grains__['os_family'] returned a
KeyError. This commits adds a check to the grains module test to ensure
os_family is present.
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | '''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
Add test to test if os_family grain is provided.
Corey Quinn reported a issue where __grains__['os_family'] returned a
KeyError. This commits adds a check to the grains module test to ensure
os_family is present. | '''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'os_family',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
| <commit_before>'''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
<commit_msg>Add test to test if os_family grain is provided.
Corey Quinn reported a issue where __grains__['os_family'] returned a
KeyError. This commits adds a check to the grains module test to ensure
os_family is present.<commit_after> | '''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'os_family',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
| '''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
Add test to test if os_family grain is provided.
Corey Quinn reported a issue where __grains__['os_family'] returned a
KeyError. This commits adds a check to the grains module test to ensure
os_family is present.'''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'os_family',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
| <commit_before>'''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
<commit_msg>Add test to test if os_family grain is provided.
Corey Quinn reported a issue where __grains__['os_family'] returned a
KeyError. This commits adds a check to the grains module test to ensure
os_family is present.<commit_after>'''
Test the grains module
'''
import integration
class TestModulesGrains(integration.ModuleCase):
'''
Test the grains module
'''
def test_items(self):
'''
grains.items
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.items')['test_grain'],
opts['grains']['test_grain']
)
def test_item(self):
'''
grains.item
'''
opts = self.minion_opts
self.assertEqual(
self.run_function('grains.item', ['test_grain']),
opts['grains']['test_grain']
)
def test_ls(self):
'''
grains.ls
'''
check_for = (
'cpuarch',
'cpu_flags',
'cpu_model',
'domain',
'fqdn',
'host',
'kernel',
'kernelrelease',
'localhost',
'mem_total',
'num_cpus',
'os',
'os_family',
'path',
'ps',
'pythonpath',
'pythonversion',
'saltpath',
'saltversion',
'virtual',
)
lsgrains = self.run_function('grains.ls')
for grain_name in check_for:
self.assertTrue(grain_name in lsgrains)
if __name__ == '__main__':
from integration import run_tests
run_tests(TestModulesGrains)
|
f3b960da24f2f9486eca3936a5be0d5001c24ab3 | package_name/module.py | package_name/module.py | """
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
X : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `X` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
| """
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
x : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `x` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
| Fix X -> x in cubic_rectification | DOC: Fix X -> x in cubic_rectification
| Python | mit | scottclowe/python-continuous-integration,scottclowe/python-ci,scottclowe/python-continuous-integration,scottclowe/python-ci | """
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
X : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `X` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
DOC: Fix X -> x in cubic_rectification | """
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
x : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `x` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
| <commit_before>"""
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
X : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `X` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
<commit_msg>DOC: Fix X -> x in cubic_rectification<commit_after> | """
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
x : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `x` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
| """
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
X : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `X` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
DOC: Fix X -> x in cubic_rectification"""
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
x : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `x` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
| <commit_before>"""
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
X : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `X` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
<commit_msg>DOC: Fix X -> x in cubic_rectification<commit_after>"""
Module provides a simple cubic_rectification function.
"""
import numpy as np
def cubic_rectification(x):
"""
Rectified cube of an array.
Parameters
----------
x : numpy.ndarray
Input array.
Returns
-------
numpy.ndarray
Elementwise, the cube of `x` where it is positive and `0` otherwise.
Note
----
This is a sample function, using a Google docstring format.
Note
----
The use of intersphinx will cause numpy.ndarray above to link to its
documentation, but not inside this Note.
"""
return np.maximum(0, x ** 3)
|
6f199c9f2fb54931fa852700598b3c0fe24e40ad | viewflow/exceptions.py | viewflow/exceptions.py | class FlowRuntimeError(Exception):
"""Unrecovable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
| class FlowRuntimeError(Exception):
"""Unrecoverable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
| Fix typo in exception docstring | Fix typo in exception docstring | Python | agpl-3.0 | viewflow/viewflow,viewflow/viewflow,viewflow/viewflow | class FlowRuntimeError(Exception):
"""Unrecovable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
Fix typo in exception docstring | class FlowRuntimeError(Exception):
"""Unrecoverable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
| <commit_before>class FlowRuntimeError(Exception):
"""Unrecovable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
<commit_msg>Fix typo in exception docstring<commit_after> | class FlowRuntimeError(Exception):
"""Unrecoverable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
| class FlowRuntimeError(Exception):
"""Unrecovable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
Fix typo in exception docstringclass FlowRuntimeError(Exception):
"""Unrecoverable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
| <commit_before>class FlowRuntimeError(Exception):
"""Unrecovable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
<commit_msg>Fix typo in exception docstring<commit_after>class FlowRuntimeError(Exception):
"""Unrecoverable flow runtime error."""
class FlowLockFailed(Exception):
"""Flow lock failed."""
|
fb8f77553d75cdcf2c50423cb2e4159350a13e5a | tests/test_person.py | tests/test_person.py | import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", "Musonye")
my_class_instance.full_name()
| import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", 1234)
my_class_instance.full_name()
| Fix test case for class Person | Fix test case for class Person
| Python | mit | peterpaints/room-allocator | import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", "Musonye")
my_class_instance.full_name()
Fix test case for class Person | import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", 1234)
my_class_instance.full_name()
| <commit_before>import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", "Musonye")
my_class_instance.full_name()
<commit_msg>Fix test case for class Person<commit_after> | import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", 1234)
my_class_instance.full_name()
| import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", "Musonye")
my_class_instance.full_name()
Fix test case for class Personimport unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", 1234)
my_class_instance.full_name()
| <commit_before>import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", "Musonye")
my_class_instance.full_name()
<commit_msg>Fix test case for class Person<commit_after>import unittest
from classes.person import Person
class PersonClassTest(unittest.TestCase):
def test_full_name_only_returns_strings(self):
with self.assertRaises(ValueError, msg='Only strings are allowed as names'):
my_class_instance = Person("staff", "Peter", 1234)
my_class_instance.full_name()
|
892393458612ea78319cceeb98957c34ccb91d2d | django_react_templatetags/encoders.py | django_react_templatetags/encoders.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
# Allow backwards compability with react_representation prop
if not hasattr(o, 'to_react_representation'):
return o.react_representation
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
| Drop support for reacct_representation property | Drop support for reacct_representation property
| Python | mit | Frojd/django-react-templatetags,Frojd/django-react-templatetags,Frojd/django-react-templatetags | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
# Allow backwards compability with react_representation prop
if not hasattr(o, 'to_react_representation'):
return o.react_representation
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
Drop support for reacct_representation property | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
# Allow backwards compability with react_representation prop
if not hasattr(o, 'to_react_representation'):
return o.react_representation
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
<commit_msg>Drop support for reacct_representation property<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
# Allow backwards compability with react_representation prop
if not hasattr(o, 'to_react_representation'):
return o.react_representation
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
Drop support for reacct_representation property# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
# Allow backwards compability with react_representation prop
if not hasattr(o, 'to_react_representation'):
return o.react_representation
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
<commit_msg>Drop support for reacct_representation property<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.serializers.json import DjangoJSONEncoder
from django_react_templatetags.mixins import RepresentationMixin
def json_encoder_cls_factory(context):
class ReqReactRepresentationJSONEncoder(ReactRepresentationJSONEncoder):
context = None
ReqReactRepresentationJSONEncoder.context = context
return ReqReactRepresentationJSONEncoder
class ReactRepresentationJSONEncoder(DjangoJSONEncoder):
'''
Custom json encoder that adds support for RepresentationMixin
'''
def default(self, o):
if isinstance(o, RepresentationMixin):
args = [self.context if hasattr(self, 'context') else None]
args = [x for x in args if x is not None]
return o.to_react_representation(*args)
return super(ReactRepresentationJSONEncoder, self).default(o)
|
cfe78dabea226e24928d26183f4b135c52b64663 | feder/cases/forms.py | feder/cases/forms.py | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
if self.monitoring:
self.instance.monitoring = self.monitoring
super(CaseForm, self).save(*args, **kwargs)
class Meta:
model = Case
fields = ['name', 'institution']
| # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
if self.monitoring:
self.instance.monitoring = self.monitoring
class Meta:
model = Case
fields = ['name', 'institution']
| Clean up form in CaseForm | Clean up form in CaseForm
| Python | mit | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
if self.monitoring:
self.instance.monitoring = self.monitoring
super(CaseForm, self).save(*args, **kwargs)
class Meta:
model = Case
fields = ['name', 'institution']
Clean up form in CaseForm | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
if self.monitoring:
self.instance.monitoring = self.monitoring
class Meta:
model = Case
fields = ['name', 'institution']
| <commit_before># -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
if self.monitoring:
self.instance.monitoring = self.monitoring
super(CaseForm, self).save(*args, **kwargs)
class Meta:
model = Case
fields = ['name', 'institution']
<commit_msg>Clean up form in CaseForm<commit_after> | # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
if self.monitoring:
self.instance.monitoring = self.monitoring
class Meta:
model = Case
fields = ['name', 'institution']
| # -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
if self.monitoring:
self.instance.monitoring = self.monitoring
super(CaseForm, self).save(*args, **kwargs)
class Meta:
model = Case
fields = ['name', 'institution']
Clean up form in CaseForm# -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
if self.monitoring:
self.instance.monitoring = self.monitoring
class Meta:
model = Case
fields = ['name', 'institution']
| <commit_before># -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
if self.monitoring:
self.instance.monitoring = self.monitoring
super(CaseForm, self).save(*args, **kwargs)
class Meta:
model = Case
fields = ['name', 'institution']
<commit_msg>Clean up form in CaseForm<commit_after># -*- coding: utf-8 -*-
from atom.ext.crispy_forms.forms import SingleButtonMixin
from braces.forms import UserKwargModelFormMixin
from django import forms
from .models import Case
class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.monitoring = kwargs.pop('monitoring', None)
super(CaseForm, self).__init__(*args, **kwargs)
if self.monitoring:
self.instance.monitoring = self.monitoring
class Meta:
model = Case
fields = ['name', 'institution']
|
06ec0a7f0a6a53fddfb2038b0ae8cc1bad2c8511 | blankspot/node_registration/models.py | blankspot/node_registration/models.py | from django.db import models
class Contact(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
def __unicode__(self):
return (self.nick)
def get_absolute_url(self):
return reverse('contact-detail', kwargs={'pk': self.pk})
class Position(models.Model):
contact = models.ForeignKey('Contact')
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
| from django.db import models
class Position(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
| Revert splitting of model as its adding to much complexitiy for the timebeing to later logics IIt's just not adding enought value for having a more complicated implementation. | Revert splitting of model as its adding to much complexitiy for the timebeing to later logics
IIt's just not adding enought value for having a more complicated implementation.
| Python | agpl-3.0 | frlan/blankspot | from django.db import models
class Contact(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
def __unicode__(self):
return (self.nick)
def get_absolute_url(self):
return reverse('contact-detail', kwargs={'pk': self.pk})
class Position(models.Model):
contact = models.ForeignKey('Contact')
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
Revert splitting of model as its adding to much complexitiy for the timebeing to later logics
IIt's just not adding enought value for having a more complicated implementation. | from django.db import models
class Position(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
| <commit_before>from django.db import models
class Contact(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
def __unicode__(self):
return (self.nick)
def get_absolute_url(self):
return reverse('contact-detail', kwargs={'pk': self.pk})
class Position(models.Model):
contact = models.ForeignKey('Contact')
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
<commit_msg>Revert splitting of model as its adding to much complexitiy for the timebeing to later logics
IIt's just not adding enought value for having a more complicated implementation.<commit_after> | from django.db import models
class Position(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
| from django.db import models
class Contact(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
def __unicode__(self):
return (self.nick)
def get_absolute_url(self):
return reverse('contact-detail', kwargs={'pk': self.pk})
class Position(models.Model):
contact = models.ForeignKey('Contact')
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
Revert splitting of model as its adding to much complexitiy for the timebeing to later logics
IIt's just not adding enought value for having a more complicated implementation.from django.db import models
class Position(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
| <commit_before>from django.db import models
class Contact(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
def __unicode__(self):
return (self.nick)
def get_absolute_url(self):
return reverse('contact-detail', kwargs={'pk': self.pk})
class Position(models.Model):
contact = models.ForeignKey('Contact')
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
<commit_msg>Revert splitting of model as its adding to much complexitiy for the timebeing to later logics
IIt's just not adding enought value for having a more complicated implementation.<commit_after>from django.db import models
class Position(models.Model):
first_name = models.CharField(max_length=50, blank=True, null=True)
last_name = models.CharField(max_length=50, blank=True, null=True)
nick = models.CharField(max_length=128)
email = models.EmailField(max_length=254)
street = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(max_length=100, blank=True, null=True)
address_description = models.TextField(blank=True, null=True)
altitude = models.FloatField(blank=True, null=True)
latitude = models.FloatField(blank=True, null=True)
def __unicode__(self):
return (self.street)
def get_absolute_url(self):
return reverse('position-detail', kwargs={'pk': self.pk})
|
b86d88a10839ba642f992dcaf3e69de3a244f984 | golingo/urls.py | golingo/urls.py | """golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
| """golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from quiz.views import QuestionTemplateView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^question/$', QuestionTemplateView.as_view(), name='question'),
]
| Add initial url to question | Add initial url to question
| Python | bsd-3-clause | jesuejunior/golingo,jesuejunior/golingo,jesuejunior/golingo | """golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
Add initial url to question | """golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from quiz.views import QuestionTemplateView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^question/$', QuestionTemplateView.as_view(), name='question'),
]
| <commit_before>"""golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
<commit_msg>Add initial url to question<commit_after> | """golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from quiz.views import QuestionTemplateView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^question/$', QuestionTemplateView.as_view(), name='question'),
]
| """golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
Add initial url to question"""golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from quiz.views import QuestionTemplateView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^question/$', QuestionTemplateView.as_view(), name='question'),
]
| <commit_before>"""golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
<commit_msg>Add initial url to question<commit_after>"""golingo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from quiz.views import QuestionTemplateView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^question/$', QuestionTemplateView.as_view(), name='question'),
]
|
4a330e190dcb727cb7483b826f2927b94b081e8a | yardcam.py | yardcam.py | import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
wait = delay.next_capture() # Delay time in seconds from delay.py
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
| import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
# wait = delay.next_capture() # Delay time in seconds from delay.py
wait = 60
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
| Remove delay from loop for testing | Remove delay from loop for testing
| Python | mit | gnfrazier/YardCam | import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
wait = delay.next_capture() # Delay time in seconds from delay.py
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
Remove delay from loop for testing | import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
# wait = delay.next_capture() # Delay time in seconds from delay.py
wait = 60
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
| <commit_before>import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
wait = delay.next_capture() # Delay time in seconds from delay.py
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
<commit_msg>Remove delay from loop for testing<commit_after> | import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
# wait = delay.next_capture() # Delay time in seconds from delay.py
wait = 60
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
| import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
wait = delay.next_capture() # Delay time in seconds from delay.py
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
Remove delay from loop for testingimport capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
# wait = delay.next_capture() # Delay time in seconds from delay.py
wait = 60
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
| <commit_before>import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
wait = delay.next_capture() # Delay time in seconds from delay.py
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
<commit_msg>Remove delay from loop for testing<commit_after>import capture
from picamera import PiCamera
import time
import delay
def image_cap_loop(camera, status=None):
"""Set image parameters, capture image, set wait time, repeat"""
resolution = (1640, 1232)
# wait = delay.next_capture() # Delay time in seconds from delay.py
wait = 60
waithours = wait / 60 / 60 # Convert seconds to hours
print('Next capture begins in {} hours.'.format(waithours))
time.sleep(wait)
images = 18
for i in range(images):
latest = capture.cap(camera, resolution, status)
status = latest[0]
capture.copy_latest(latest[1])
time.sleep(300)
status = camera.shutdown(camera)
image_cap_loop(camera, status)
return latest
def main():
camera = PiCamera()
image_cap_loop(camera)
print("Images captured")
if __name__ == '__main__':
main()
|
f2d3d52c4118b9ff0161a8db5d53d83c199918e3 | gunicorn_cfg.py | gunicorn_cfg.py | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application loglevel
loglevel = 'debug'
| """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| Update gunicorn config with docs | Update gunicorn config with docs
| Python | bsd-3-clause | mehdisadeghi/sqmpy,simphony/sqmpy,mehdisadeghi/sqmpy,simphony/sqmpy,simphony/sqmpy,mehdisadeghi/sqmpy | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application loglevel
loglevel = 'debug'
Update gunicorn config with docs | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| <commit_before>"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application loglevel
loglevel = 'debug'
<commit_msg>Update gunicorn config with docs<commit_after> | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application loglevel
loglevel = 'debug'
Update gunicorn config with docs"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| <commit_before>"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application loglevel
loglevel = 'debug'
<commit_msg>Update gunicorn config with docs<commit_after>"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:3000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
|
3f6b18304a3f947cc165201a507a672a56af851f | warehouse/cli.py | warehouse/cli.py | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
def include(module):
return getattr(module, "__commands__", {})
__commands__ = {
"migrate": include(warehouse.migrations.cli),
}
| # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
__commands__ = {
"migrate": warehouse.migrations.cli.__commands__,
}
| Remove a useless function call | Remove a useless function call
| Python | apache-2.0 | robhudson/warehouse,mattrobenolt/warehouse,techtonik/warehouse,mattrobenolt/warehouse,mattrobenolt/warehouse,robhudson/warehouse,techtonik/warehouse | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
def include(module):
return getattr(module, "__commands__", {})
__commands__ = {
"migrate": include(warehouse.migrations.cli),
}
Remove a useless function call | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
__commands__ = {
"migrate": warehouse.migrations.cli.__commands__,
}
| <commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
def include(module):
return getattr(module, "__commands__", {})
__commands__ = {
"migrate": include(warehouse.migrations.cli),
}
<commit_msg>Remove a useless function call<commit_after> | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
__commands__ = {
"migrate": warehouse.migrations.cli.__commands__,
}
| # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
def include(module):
return getattr(module, "__commands__", {})
__commands__ = {
"migrate": include(warehouse.migrations.cli),
}
Remove a useless function call# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
__commands__ = {
"migrate": warehouse.migrations.cli.__commands__,
}
| <commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
def include(module):
return getattr(module, "__commands__", {})
__commands__ = {
"migrate": include(warehouse.migrations.cli),
}
<commit_msg>Remove a useless function call<commit_after># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import warehouse.migrations.cli
__commands__ = {
"migrate": warehouse.migrations.cli.__commands__,
}
|
aff8cebfd168493a4a9dff77cf9722507429d570 | contrib/examples/actions/pythonactions/isprime.py | contrib/examples/actions/pythonactions/isprime.py | import math
class PrimeChecker(object):
def run(self, **kwargs):
return self._is_prime(**kwargs)
def _is_prime(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
| import math
class PrimeChecker(object):
def run(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
| Update pythonaction sample for simpler run. | Update pythonaction sample for simpler run.
| Python | apache-2.0 | peak6/st2,lakshmi-kannan/st2,pixelrebel/st2,StackStorm/st2,jtopjian/st2,pinterb/st2,Plexxi/st2,punalpatel/st2,armab/st2,grengojbo/st2,grengojbo/st2,punalpatel/st2,pixelrebel/st2,Itxaka/st2,lakshmi-kannan/st2,emedvedev/st2,lakshmi-kannan/st2,pixelrebel/st2,nzlosh/st2,peak6/st2,dennybaa/st2,pinterb/st2,Plexxi/st2,nzlosh/st2,Itxaka/st2,grengojbo/st2,alfasin/st2,nzlosh/st2,pinterb/st2,Plexxi/st2,jtopjian/st2,emedvedev/st2,StackStorm/st2,armab/st2,jtopjian/st2,StackStorm/st2,dennybaa/st2,alfasin/st2,emedvedev/st2,peak6/st2,punalpatel/st2,tonybaloney/st2,Plexxi/st2,tonybaloney/st2,alfasin/st2,nzlosh/st2,StackStorm/st2,tonybaloney/st2,dennybaa/st2,armab/st2,Itxaka/st2 | import math
class PrimeChecker(object):
def run(self, **kwargs):
return self._is_prime(**kwargs)
def _is_prime(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
Update pythonaction sample for simpler run. | import math
class PrimeChecker(object):
def run(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
| <commit_before>import math
class PrimeChecker(object):
def run(self, **kwargs):
return self._is_prime(**kwargs)
def _is_prime(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
<commit_msg>Update pythonaction sample for simpler run.<commit_after> | import math
class PrimeChecker(object):
def run(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
| import math
class PrimeChecker(object):
def run(self, **kwargs):
return self._is_prime(**kwargs)
def _is_prime(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
Update pythonaction sample for simpler run.import math
class PrimeChecker(object):
def run(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
| <commit_before>import math
class PrimeChecker(object):
def run(self, **kwargs):
return self._is_prime(**kwargs)
def _is_prime(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
<commit_msg>Update pythonaction sample for simpler run.<commit_after>import math
class PrimeChecker(object):
def run(self, value=0):
if math.floor(value) != value:
raise ValueError('%s should be an integer.' % value)
if value < 2:
return False
for test in range(2, int(math.floor(math.sqrt(value)))+1):
if value % test == 0:
return False
return True
if __name__ == '__main__':
checker = PrimeChecker()
for i in range(0, 10):
print '%s : %s' % (i, checker.run(**{'value': i}))
|
035ae3b2acf5c29304a1c2ec327feb5cc7160559 | django_vend/core/forms.py | django_vend/core/forms.py | import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
| import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
elif value == "null":
value = None
return super(VendDateTimeField, self).to_python(value)
| Allow VendDateTimeField to accept null dates (if required is set to False) | Allow VendDateTimeField to accept null dates (if required is set to False)
| Python | bsd-3-clause | remarkablerocket/django-vend,remarkablerocket/django-vend | import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
Allow VendDateTimeField to accept null dates (if required is set to False) | import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
elif value == "null":
value = None
return super(VendDateTimeField, self).to_python(value)
| <commit_before>import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
<commit_msg>Allow VendDateTimeField to accept null dates (if required is set to False)<commit_after> | import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
elif value == "null":
value = None
return super(VendDateTimeField, self).to_python(value)
| import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
Allow VendDateTimeField to accept null dates (if required is set to False)import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
elif value == "null":
value = None
return super(VendDateTimeField, self).to_python(value)
| <commit_before>import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
<commit_msg>Allow VendDateTimeField to accept null dates (if required is set to False)<commit_after>import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
elif value == "null":
value = None
return super(VendDateTimeField, self).to_python(value)
|
080637c99898082d38b306ef73983552b263e628 | inbox/ignition.py | inbox/ignition.py | from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
| from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
pool_recycle=3600,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
| Set pool_recycle to deal with MySQL closing idle connections. | Set pool_recycle to deal with MySQL closing idle connections.
See http://docs.sqlalchemy.org/en/latest/dialects/mysql.html#connection-timeouts
Cherry-picking this onto master so it definitely gets deployed.
| Python | agpl-3.0 | Eagles2F/sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,Eagles2F/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,jobscore/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,jobscore/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,nylas/sync-engine,nylas/sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,ErinCall/sync-engine,gale320/sync-engine,closeio/nylas,EthanBlackburn/sync-engine,nylas/sync-engine,gale320/sync-engine,closeio/nylas,gale320/sync-engine | from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
Set pool_recycle to deal with MySQL closing idle connections.
See http://docs.sqlalchemy.org/en/latest/dialects/mysql.html#connection-timeouts
Cherry-picking this onto master so it definitely gets deployed. | from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
pool_recycle=3600,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
| <commit_before>from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
<commit_msg>Set pool_recycle to deal with MySQL closing idle connections.
See http://docs.sqlalchemy.org/en/latest/dialects/mysql.html#connection-timeouts
Cherry-picking this onto master so it definitely gets deployed.<commit_after> | from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
pool_recycle=3600,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
| from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
Set pool_recycle to deal with MySQL closing idle connections.
See http://docs.sqlalchemy.org/en/latest/dialects/mysql.html#connection-timeouts
Cherry-picking this onto master so it definitely gets deployed.from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
pool_recycle=3600,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
| <commit_before>from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
<commit_msg>Set pool_recycle to deal with MySQL closing idle connections.
See http://docs.sqlalchemy.org/en/latest/dialects/mysql.html#connection-timeouts
Cherry-picking this onto master so it definitely gets deployed.<commit_after>from sqlalchemy import create_engine
from inbox.sqlalchemy_ext.util import ForceStrictMode
from inbox.config import db_uri, config
DB_POOL_SIZE = config.get_required('DB_POOL_SIZE')
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=5):
engine = create_engine(db_uri(),
listeners=[ForceStrictMode()],
isolation_level='READ COMMITTED',
echo=False,
pool_size=pool_size,
pool_recycle=3600,
max_overflow=max_overflow,
connect_args={'charset': 'utf8mb4'})
return engine
def init_db():
""" Make the tables.
This is called only from bin/create-db, which is run during setup.
Previously we allowed this to run everytime on startup, which broke some
alembic revisions by creating new tables before a migration was run.
From now on, we should ony be creating tables+columns via SQLalchemy *once*
and all subscequent changes done via migration scripts.
"""
from inbox.models.base import MailSyncBase
engine = main_engine(pool_size=1)
MailSyncBase.metadata.create_all(engine)
|
1513532e473866438ac9dabbfb462e9348a5895e | hug/output_format.py | hug/output_format.py | import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
| import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
elif getattr(item, '__json__', None):
return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
| Add the ability for individual objects to define how they would like there data to be outputed for json | Add the ability for individual objects to define how they would like there data to be outputed for json
| Python | mit | janusnic/hug,yasoob/hug,janusnic/hug,shaunstanislaus/hug,timothycrosley/hug,alisaifee/hug,gbn972/hug,MuhammadAlkarouri/hug,philiptzou/hug,giserh/hug,timothycrosley/hug,STANAPO/hug,shaunstanislaus/hug,STANAPO/hug,origingod/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,alisaifee/hug,giserh/hug,yasoob/hug,gbn972/hug,philiptzou/hug,jean/hug,timothycrosley/hug,jean/hug,origingod/hug | import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
Add the ability for individual objects to define how they would like there data to be outputed for json | import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
elif getattr(item, '__json__', None):
return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
| <commit_before>import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
<commit_msg>Add the ability for individual objects to define how they would like there data to be outputed for json<commit_after> | import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
elif getattr(item, '__json__', None):
return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
| import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
Add the ability for individual objects to define how they would like there data to be outputed for jsonimport json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
elif getattr(item, '__json__', None):
return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
| <commit_before>import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
<commit_msg>Add the ability for individual objects to define how they would like there data to be outputed for json<commit_after>import json as json_converter
from datetime import date, datetime
from hug.format import content_type
def _json_converter(item):
if isinstance(item, (date, datetime)):
return item.isoformat()
elif isinstance(item, bytes):
return item.decode('utf8')
elif getattr(item, '__json__', None):
return item.__json__()
raise TypeError("Type not serializable")
@content_type('application/json')
def json(content, **kwargs):
"""JSON (Javascript Serialized Object Notation)"""
return json_converter.dumps(content, default=_json_converter, **kwargs).encode('utf8')
@content_type('text/plain')
def text(content):
"""Free form UTF8 text"""
return content.encode('utf8')
def _camelcase(dictionary):
if not isinstance(dictionary, dict):
return dictionary
new_dictionary = {}
for key, value in dictionary.items():
if isinstance(key, str):
key = key[0] + "".join(key.title().split('_'))[1:]
new_dictionary[key] = _camelcase(value)
return new_dictionary
@content_type('application/json')
def json_camelcase(content):
"""JSON (Javascript Serialized Object Notation) with all keys camelCased"""
return json(_camelcase(content))
@content_type('application/json')
def pretty_json(content):
"""JSON (Javascript Serialized Object Notion) pretty printed and indented"""
return json(content, indent=4, separators=(',', ': '))
|
e507461dba5020726c9505fef187098ad234a68a | kazoo/tests/__init__.py | kazoo/tests/__init__.py | import os
import unittest
import time
import uuid
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
| import os
import unittest
import time
import uuid
from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
| Use SkipTest that works on Py2.6 | Use SkipTest that works on Py2.6
| Python | apache-2.0 | kormat/kazoo,rackerlabs/kazoo,tempbottle/kazoo,max0d41/kazoo,rgs1/kazoo,rockerbox/kazoo,harlowja/kazoo,kormat/kazoo,rgs1/kazoo,harlowja/kazoo,pombredanne/kazoo,python-zk/kazoo,python-zk/kazoo,pombredanne/kazoo,rockerbox/kazoo,tempbottle/kazoo,AlexanderplUs/kazoo,jacksontj/kazoo,max0d41/kazoo,Asana/kazoo,jacksontj/kazoo,rackerlabs/kazoo,bsanders/kazoo,bsanders/kazoo,AlexanderplUs/kazoo | import os
import unittest
import time
import uuid
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
Use SkipTest that works on Py2.6 | import os
import unittest
import time
import uuid
from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
| <commit_before>import os
import unittest
import time
import uuid
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
<commit_msg>Use SkipTest that works on Py2.6<commit_after> | import os
import unittest
import time
import uuid
from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
| import os
import unittest
import time
import uuid
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
Use SkipTest that works on Py2.6import os
import unittest
import time
import uuid
from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
| <commit_before>import os
import unittest
import time
import uuid
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise unittest.SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
<commit_msg>Use SkipTest that works on Py2.6<commit_after>import os
import unittest
import time
import uuid
from nose import SkipTest
from kazoo.client import KazooClient, KazooState
# if this env variable is set, ZK client integration tests are run
# against the specified host list
ENV_TEST_HOSTS = "KAZOO_TEST_HOSTS"
def get_hosts_or_skip():
if ENV_TEST_HOSTS in os.environ:
return os.environ[ENV_TEST_HOSTS]
raise SkipTest("Skipping ZooKeeper test. To run, set " +
"%s env to a host list. (ex: localhost:2181)" %
ENV_TEST_HOSTS)
def get_client_or_skip(**kwargs):
hosts = get_hosts_or_skip()
return KazooClient(hosts, **kwargs)
def until_timeout(timeout, value=None):
"""Returns an iterator that repeats until a timeout is reached
timeout is in seconds
"""
start = time.time()
while True:
if time.time() - start >= timeout:
raise Exception("timed out before success!")
yield value
class KazooTestCase(unittest.TestCase):
def _get_client(self):
return KazooClient(self.hosts)
def setUp(self):
namespace = "/kazootests" + uuid.uuid4().hex
self.hosts = get_hosts_or_skip() + namespace
self.client = self._get_client()
def tearDown(self):
if self.client.state == KazooState.LOST:
self.client.connect()
self.client.stop()
|
544f00f73657bbc06f4dd7f5faae2ce2546fe788 | lava_server/__init__.py | lava_server/__init__.py | # Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "dev", 0)
| # Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "final", 0)
| Mark final for 0.2 release | Mark final for 0.2 release
| Python | agpl-3.0 | Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server | # Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "dev", 0)
Mark final for 0.2 release | # Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "final", 0)
| <commit_before># Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "dev", 0)
<commit_msg>Mark final for 0.2 release<commit_after> | # Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "final", 0)
| # Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "dev", 0)
Mark final for 0.2 release# Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "final", 0)
| <commit_before># Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "dev", 0)
<commit_msg>Mark final for 0.2 release<commit_after># Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 2, 0, "final", 0)
|
f55af10f1767d39fdba65fb4c17beee526f96748 | lib/__init__.py | lib/__init__.py | """retriever.lib contains the core EcoData Retriever modules."""
|
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
| Check for and use system proxies for downloading files | Check for and use system proxies for downloading files
In some cases when the user is using a proxy urlib.urlopen() will fail to successfully open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them | Python | mit | embaldridge/retriever,davharris/retriever,davharris/retriever,davharris/retriever,embaldridge/retriever,goelakash/retriever,henrykironde/deletedret,goelakash/retriever,henrykironde/deletedret,embaldridge/retriever | """retriever.lib contains the core EcoData Retriever modules."""
Check for and use system proxies for downloading files
In some cases when the user is using a proxy urlib.urlopen() will fail to successfully open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them |
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
| <commit_before>"""retriever.lib contains the core EcoData Retriever modules."""
<commit_msg>Check for and use system proxies for downloading files
In some cases when the user is using a proxy urlib.urlopen() will fail to successfully open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them<commit_after> |
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
| """retriever.lib contains the core EcoData Retriever modules."""
Check for and use system proxies for downloading files
In some cases when the user is using a proxy urlib.urlopen() will fail to successfully open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
| <commit_before>"""retriever.lib contains the core EcoData Retriever modules."""
<commit_msg>Check for and use system proxies for downloading files
In some cases when the user is using a proxy urlib.urlopen() will fail to successfully open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them<commit_after>
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
|
a0d32bb36674863a0e9a04aa97e8c1f7e8ca0f37 | lecturer/tests.py | lecturer/tests.py |
from django.test import Client, TestCase
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
|
from django.test import Client, TestCase
from django.contrib.auth import get_user_model
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
def test_lecturer_login(self):
user = get_user_model().objects.create_user('test_user', 'test@test.com', 'kNouYH8J3KjJH3')
user.save()
# Test if lecturer is logged in upon login-request
self.assertEqual(c.post('/login/', {'username': 'test_user', 'password': 'kNouYH8J3KjJH3'}).status_code, 200) | Test if lecturer is logged in upon login-request | Test if lecturer is logged in upon login-request
| Python | mit | martinlunde/RealBack,martinlunde/RealBack,martinlunde/RealBack |
from django.test import Client, TestCase
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
Test if lecturer is logged in upon login-request |
from django.test import Client, TestCase
from django.contrib.auth import get_user_model
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
def test_lecturer_login(self):
user = get_user_model().objects.create_user('test_user', 'test@test.com', 'kNouYH8J3KjJH3')
user.save()
# Test if lecturer is logged in upon login-request
self.assertEqual(c.post('/login/', {'username': 'test_user', 'password': 'kNouYH8J3KjJH3'}).status_code, 200) | <commit_before>
from django.test import Client, TestCase
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
<commit_msg>Test if lecturer is logged in upon login-request<commit_after> |
from django.test import Client, TestCase
from django.contrib.auth import get_user_model
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
def test_lecturer_login(self):
user = get_user_model().objects.create_user('test_user', 'test@test.com', 'kNouYH8J3KjJH3')
user.save()
# Test if lecturer is logged in upon login-request
self.assertEqual(c.post('/login/', {'username': 'test_user', 'password': 'kNouYH8J3KjJH3'}).status_code, 200) |
from django.test import Client, TestCase
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
Test if lecturer is logged in upon login-request
from django.test import Client, TestCase
from django.contrib.auth import get_user_model
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
def test_lecturer_login(self):
user = get_user_model().objects.create_user('test_user', 'test@test.com', 'kNouYH8J3KjJH3')
user.save()
# Test if lecturer is logged in upon login-request
self.assertEqual(c.post('/login/', {'username': 'test_user', 'password': 'kNouYH8J3KjJH3'}).status_code, 200) | <commit_before>
from django.test import Client, TestCase
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
<commit_msg>Test if lecturer is logged in upon login-request<commit_after>
from django.test import Client, TestCase
from django.contrib.auth import get_user_model
c = Client()
class WebsiteStabilityTestCase(TestCase):
def test_availability(self):
self.assertEqual(c.get('/lecturer/').status_code, 302) # We are getting redirect when not logged in, so 302
# TODO maybe check for something more reliable than 302?
def test_lecturer_login(self):
user = get_user_model().objects.create_user('test_user', 'test@test.com', 'kNouYH8J3KjJH3')
user.save()
# Test if lecturer is logged in upon login-request
self.assertEqual(c.post('/login/', {'username': 'test_user', 'password': 'kNouYH8J3KjJH3'}).status_code, 200) |
0867054258e231b2ce9b028c5ce2bc3a26bca7be | gamernews/apps/threadedcomments/views.py | gamernews/apps/threadedcomments/views.py | from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted( request ):
if request.GET['c']:
comment_id, blob_id = request.GET['c'].split( ':' )
blob = Blob.objects.get( pk=blob_id )
if post:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
| from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted(request):
if request.GET['c']:
comment_id, blob_id = request.GET['c']
comment = Comment.objects.get( pk=comment_id )
blob = Blob.objects.get(pk=blob_id)
if blob:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
| Remove name, url and email from comment form | Remove name, url and email from comment form
| Python | mit | underlost/GamerNews,underlost/GamerNews | from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted( request ):
if request.GET['c']:
comment_id, blob_id = request.GET['c'].split( ':' )
blob = Blob.objects.get( pk=blob_id )
if post:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
Remove name, url and email from comment form | from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted(request):
if request.GET['c']:
comment_id, blob_id = request.GET['c']
comment = Comment.objects.get( pk=comment_id )
blob = Blob.objects.get(pk=blob_id)
if blob:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
| <commit_before>from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted( request ):
if request.GET['c']:
comment_id, blob_id = request.GET['c'].split( ':' )
blob = Blob.objects.get( pk=blob_id )
if post:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
<commit_msg>Remove name, url and email from comment form<commit_after> | from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted(request):
if request.GET['c']:
comment_id, blob_id = request.GET['c']
comment = Comment.objects.get( pk=comment_id )
blob = Blob.objects.get(pk=blob_id)
if blob:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
| from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted( request ):
if request.GET['c']:
comment_id, blob_id = request.GET['c'].split( ':' )
blob = Blob.objects.get( pk=blob_id )
if post:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
Remove name, url and email from comment formfrom django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted(request):
if request.GET['c']:
comment_id, blob_id = request.GET['c']
comment = Comment.objects.get( pk=comment_id )
blob = Blob.objects.get(pk=blob_id)
if blob:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
| <commit_before>from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted( request ):
if request.GET['c']:
comment_id, blob_id = request.GET['c'].split( ':' )
blob = Blob.objects.get( pk=blob_id )
if post:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
<commit_msg>Remove name, url and email from comment form<commit_after>from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from django.views.generic.list import ListView
from core.models import Account as User
from django_comments.models import Comment
from news.models import Blob, BlobInstance
from .models import ThreadedComment
def single_comment(request, id):
comment = get_object_or_404(ThreadedComment, id=id)
variables = RequestContext(request, {'comment': comment})
return render_to_response('comments/single.html', variables)
def comment_posted(request):
if request.GET['c']:
comment_id, blob_id = request.GET['c']
comment = Comment.objects.get( pk=comment_id )
blob = Blob.objects.get(pk=blob_id)
if blob:
return HttpResponseRedirect( blob.get_absolute_url() )
return HttpResponseRedirect( "/" )
|
8cd11782d4b3558d204f438accdc15b3b702839b | unn/cli.py | unn/cli.py | import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('No command provided')
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
| import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('Valid commands are:\n ' + '\n '.join(commands))
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
| Add a helpful message if no command given | Add a helpful message if no command given
| Python | mit | runningskull/unn | import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('No command provided')
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
Add a helpful message if no command given | import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('Valid commands are:\n ' + '\n '.join(commands))
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
| <commit_before>import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('No command provided')
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
<commit_msg>Add a helpful message if no command given<commit_after> | import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('Valid commands are:\n ' + '\n '.join(commands))
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
| import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('No command provided')
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
Add a helpful message if no command givenimport sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('Valid commands are:\n ' + '\n '.join(commands))
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
| <commit_before>import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('No command provided')
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
<commit_msg>Add a helpful message if no command given<commit_after>import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('Valid commands are:\n ' + '\n '.join(commands))
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
|
ab6526b14f5bdc544367bcaa281a861d2314330b | gi2fasta.py | gi2fasta.py | import sys
from Bio import Entrez
from Bio import SeqIO
Entrez.email = "davidsshin@lbl.gov"
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
| import sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
| Add error message if User does not enter email address | Add error message if User does not enter email address
| Python | bsd-2-clause | datadaveshin/bioinformatics,datadaveshin/bioinformatics | import sys
from Bio import Entrez
from Bio import SeqIO
Entrez.email = "davidsshin@lbl.gov"
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
Add error message if User does not enter email address | import sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
| <commit_before>import sys
from Bio import Entrez
from Bio import SeqIO
Entrez.email = "davidsshin@lbl.gov"
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
<commit_msg>Add error message if User does not enter email address<commit_after> | import sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
| import sys
from Bio import Entrez
from Bio import SeqIO
Entrez.email = "davidsshin@lbl.gov"
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
Add error message if User does not enter email addressimport sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
| <commit_before>import sys
from Bio import Entrez
from Bio import SeqIO
Entrez.email = "davidsshin@lbl.gov"
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
<commit_msg>Add error message if User does not enter email address<commit_after>import sys
#from Bio import Entrez
#from Bio import SeqIO
user_email = "" # User must supply email here to access NCBI api
# Add error message in the event no email address is supplied
if user_email == "":
sys.exit("Error: Please supply your email address to line 5 of gi2fasta.py")
Entrez.email = user_email
infilename = sys.argv[1]
outfilename = sys.argv[2]
with open(infilename) as f:
gi_numbers=', '.join(line.rstrip() for line in f)
handle = Entrez.efetch(db="protein", rettype="fasta", retmode="text", id=gi_numbers)
records = SeqIO.parse(handle, "fasta")
fout = open(outfilename, 'w')
for record in records:
#print ">" + record.seq
#print record.id
print record.description
#print record.seq
fout.write(">" + str(record.description) + "\n")
fout.write(str(record.seq) + "\n")
fout.close()
#for seq_record in SeqIO.parse(record, "fasta"):
# print seq_record.id
#fob2.write("high percent identity cutoff: " + str(high_identity2) + "\n")
|
10c6112dd343901b502c31655a001e612ed6e441 | api/logs/permissions.py | api/logs/permissions.py | # -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
return False
| # -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
if obj._backrefs.get('logged'):
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
if getattr(obj, 'node'):
if ContributorOrPublic().has_object_permission(request, view, obj.node):
return True
return False
| Add case for when there are no node backrefs on logs. Again, this whole method will change when eliminating backrefs from nodelogs is merged. | Add case for when there are no node backrefs on logs. Again, this whole method will change when eliminating backrefs from nodelogs is merged.
| Python | apache-2.0 | doublebits/osf.io,mluo613/osf.io,cwisecarver/osf.io,billyhunt/osf.io,baylee-d/osf.io,caneruguz/osf.io,mattclark/osf.io,Johnetordoff/osf.io,kwierman/osf.io,kwierman/osf.io,amyshi188/osf.io,acshi/osf.io,mfraezz/osf.io,zamattiac/osf.io,pattisdr/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,hmoco/osf.io,alexschiller/osf.io,chrisseto/osf.io,felliott/osf.io,laurenrevere/osf.io,chrisseto/osf.io,alexschiller/osf.io,abought/osf.io,felliott/osf.io,mluo613/osf.io,TomBaxter/osf.io,abought/osf.io,TomHeatwole/osf.io,RomanZWang/osf.io,doublebits/osf.io,emetsger/osf.io,mluo613/osf.io,kwierman/osf.io,aaxelb/osf.io,mluke93/osf.io,caneruguz/osf.io,Nesiehr/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,kwierman/osf.io,binoculars/osf.io,mluke93/osf.io,billyhunt/osf.io,doublebits/osf.io,chennan47/osf.io,Nesiehr/osf.io,saradbowman/osf.io,hmoco/osf.io,chennan47/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,cslzchen/osf.io,wearpants/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,adlius/osf.io,SSJohns/osf.io,icereval/osf.io,alexschiller/osf.io,mattclark/osf.io,wearpants/osf.io,zachjanicki/osf.io,jnayak1/osf.io,jnayak1/osf.io,chrisseto/osf.io,baylee-d/osf.io,amyshi188/osf.io,billyhunt/osf.io,samchrisinger/osf.io,erinspace/osf.io,rdhyee/osf.io,crcresearch/osf.io,hmoco/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,doublebits/osf.io,SSJohns/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,kch8qx/osf.io,erinspace/osf.io,kch8qx/osf.io,wearpants/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,sloria/osf.io,cslzchen/osf.io,doublebits/osf.io,Johnetordoff/osf.io,icereval/osf.io,samchrisinger/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,aaxelb/osf.io,emetsger/osf.io,Johnetordoff/osf.io,jnayak1/osf.io,kch8qx/osf.io,laurenrevere/osf.io,icereval/osf.io,emetsger/osf.io,leb2dg/osf.io,abought/osf.io,CenterForOpenScience/osf.io,rdhyee/osf.io,billyhunt/osf.io,monikag
rabowska/osf.io,zamattiac/osf.io,adlius/osf.io,acshi/osf.io,alexschiller/osf.io,TomBaxter/osf.io,RomanZWang/osf.io,caneruguz/osf.io,adlius/osf.io,jnayak1/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,cwisecarver/osf.io,acshi/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,zamattiac/osf.io,mfraezz/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,kch8qx/osf.io,leb2dg/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,asanfilippo7/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,mluke93/osf.io,abought/osf.io,acshi/osf.io,alexschiller/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,mluke93/osf.io,adlius/osf.io,laurenrevere/osf.io,TomHeatwole/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,leb2dg/osf.io,felliott/osf.io,chennan47/osf.io,pattisdr/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,acshi/osf.io,caseyrollins/osf.io,emetsger/osf.io,cslzchen/osf.io,wearpants/osf.io,mluo613/osf.io,DanielSBrown/osf.io,binoculars/osf.io,erinspace/osf.io,mattclark/osf.io,asanfilippo7/osf.io,hmoco/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,amyshi188/osf.io,felliott/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,kch8qx/osf.io,samchrisinger/osf.io,chrisseto/osf.io,crcresearch/osf.io,sloria/osf.io,RomanZWang/osf.io,binoculars/osf.io,aaxelb/osf.io,rdhyee/osf.io | # -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
return False
Add case for when there are no node backrefs on logs. Again, this whole method will change when eliminating backrefs from nodelogs is merged. | # -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
if obj._backrefs.get('logged'):
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
if getattr(obj, 'node'):
if ContributorOrPublic().has_object_permission(request, view, obj.node):
return True
return False
| <commit_before># -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
return False
<commit_msg>Add case for when there are no node backrefs on logs. Again, this whole method will change when eliminating backrefs from nodelogs is merged.<commit_after> | # -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
if obj._backrefs.get('logged'):
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
if getattr(obj, 'node'):
if ContributorOrPublic().has_object_permission(request, view, obj.node):
return True
return False
| # -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
return False
Add case for when there are no node backrefs on logs. Again, this whole method will change when eliminating backrefs from nodelogs is merged.# -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
if obj._backrefs.get('logged'):
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
if getattr(obj, 'node'):
if ContributorOrPublic().has_object_permission(request, view, obj.node):
return True
return False
| <commit_before># -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
return False
<commit_msg>Add case for when there are no node backrefs on logs. Again, this whole method will change when eliminating backrefs from nodelogs is merged.<commit_after># -*- coding: utf-8 -*-
from rest_framework import permissions
from website.models import Node, NodeLog
from api.nodes.permissions import ContributorOrPublic
from api.base.utils import get_object_or_error
class ContributorOrPublicForLogs(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (NodeLog)), 'obj must be a NodeLog, got {}'.format(obj)
if obj._backrefs.get('logged'):
for node_id in obj._backrefs['logged']['node']['logs']:
node = get_object_or_error(Node, node_id, display_name='node')
if ContributorOrPublic().has_object_permission(request, view, node):
return True
if getattr(obj, 'node'):
if ContributorOrPublic().has_object_permission(request, view, obj.node):
return True
return False
|
66f6529880da4e8c444c2872820fb6b207c3794e | Server/main.py | Server/main.py | from flask import Flask
from flask_restful import Api
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
pass
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
| from flask import Flask
from flask_restful import Api
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
handler = RotatingFileHandler('server_log.log', maxBytes=100000, backupCount=5)
handler.setLevel(logging.DEBUG if debug else logging.INFO)
formatter = logging.Formatter("[%(asctime)s] %(levelname)s - %(message)s")
handler.setFormatter(formatter)
app.logger.addHandler(handler)
app.logger.info('Logger started')
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
| Add logging to before_first_request decorator | Add logging to before_first_request decorator
| Python | mit | DSM-GRAM/Artist,DSM-GRAM/Artist | from flask import Flask
from flask_restful import Api
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
pass
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
Add logging to before_first_request decorator | from flask import Flask
from flask_restful import Api
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
handler = RotatingFileHandler('server_log.log', maxBytes=100000, backupCount=5)
handler.setLevel(logging.DEBUG if debug else logging.INFO)
formatter = logging.Formatter("[%(asctime)s] %(levelname)s - %(message)s")
handler.setFormatter(formatter)
app.logger.addHandler(handler)
app.logger.info('Logger started')
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
| <commit_before>from flask import Flask
from flask_restful import Api
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
pass
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
<commit_msg>Add logging to before_first_request decorator<commit_after> | from flask import Flask
from flask_restful import Api
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
handler = RotatingFileHandler('server_log.log', maxBytes=100000, backupCount=5)
handler.setLevel(logging.DEBUG if debug else logging.INFO)
formatter = logging.Formatter("[%(asctime)s] %(levelname)s - %(message)s")
handler.setFormatter(formatter)
app.logger.addHandler(handler)
app.logger.info('Logger started')
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
| from flask import Flask
from flask_restful import Api
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
pass
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
Add logging to before_first_request decoratorfrom flask import Flask
from flask_restful import Api
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
handler = RotatingFileHandler('server_log.log', maxBytes=100000, backupCount=5)
handler.setLevel(logging.DEBUG if debug else logging.INFO)
formatter = logging.Formatter("[%(asctime)s] %(levelname)s - %(message)s")
handler.setFormatter(formatter)
app.logger.addHandler(handler)
app.logger.info('Logger started')
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
| <commit_before>from flask import Flask
from flask_restful import Api
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
pass
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
<commit_msg>Add logging to before_first_request decorator<commit_after>from flask import Flask
from flask_restful import Api
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
api = Api(app)
debug = True
@app.before_first_request
def before_first_request():
handler = RotatingFileHandler('server_log.log', maxBytes=100000, backupCount=5)
handler.setLevel(logging.DEBUG if debug else logging.INFO)
formatter = logging.Formatter("[%(asctime)s] %(levelname)s - %(message)s")
handler.setFormatter(formatter)
app.logger.addHandler(handler)
app.logger.info('Logger started')
@app.before_request
def before_request():
pass
@app.after_request
def after_request(response):
# flask.wrapper.Response 클래스의 인스턴스
return response
@app.teardown_request
def teardown_request(exception):
pass
@app.teardown_appcontext
def teardown_appcontext(exception):
pass
@app.route('/')
def index():
return 'hello'
if __name__ == '__main__':
app.run(debug=debug)
|
acec4dd403201dec5d22623c37ce1aff3324bc67 | drivnal/remote_snapshot.py | drivnal/remote_snapshot.py | from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
return ''
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
| from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
| Add get path for remote snapshot | Add get path for remote snapshot
| Python | agpl-3.0 | drivnal/drivnal,drivnal/drivnal,drivnal/drivnal | from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
return ''
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
Add get path for remote snapshot | from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
| <commit_before>from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
return ''
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
<commit_msg>Add get path for remote snapshot<commit_after> | from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
| from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
return ''
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
Add get path for remote snapshotfrom constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
| <commit_before>from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
return ''
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
<commit_msg>Add get path for remote snapshot<commit_after>from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
|
5b1ab860a0706831b8abc77a060d6ba89cf8946a | interface/subprocess/001.backticks.py | interface/subprocess/001.backticks.py | import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
| import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
'''
Execute `command and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
| Add docs to backtics function | interface.subprocess: Add docs to backtics function
| Python | unlicense | techtonik/discovery,techtonik/discovery,techtonik/discovery | import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
interface.subprocess: Add docs to backtics function | import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
'''
Execute `command and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
| <commit_before>import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
<commit_msg>interface.subprocess: Add docs to backtics function<commit_after> | import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
'''
Execute `command and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
| import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
interface.subprocess: Add docs to backtics functionimport subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
'''
Execute `command and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
| <commit_before>import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
<commit_msg>interface.subprocess: Add docs to backtics function<commit_after>import subprocess
# --- replacing shell backticks ---
# https://docs.python.org/2/library/subprocess.html#replacing-bin-sh-shell-backquote
# output=`mycmd myarg`
# output = check_output(["mycmd", "myarg"])
# not true, because mycmd is not passed to shell
try:
output = subprocess.check_output(["mycmd", "myarg"], shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
# ^ information about error condition is lost
# ^ output in case of OSError is lost
# ux notes:
# - `mycmd myarg` > ["mycmd", "myarg"]
# - `` is invisible
# subprocess.check_output is hardly rememberable
# - exception checking is excessive and not needed
# (common pattern is to check return code)
def backticks(command):
'''
Execute `command and return output.
- no return code
- no stderr capture
- bailed out with MemoryError on Windows with 500Mb of output
'''
try:
# this doesn't escape shell patterns, such as:
# ^ (windows cmd.exe shell)
output = subprocess.check_output(command, shell=True)
except OSError as ex:
# command not found.
# it is impossible to catch output here, but shell outputs
# message to stderr, which backticks doesn't catch either
output = ''
except subprocess.CalledProcessError as ex:
output = ex.output
return output
|
91bb9574ec760efd8aba2d9ae8fe67fe2e69d0a2 | jacquard/buckets/tests/test_bucket.py | jacquard/buckets/tests/test_bucket.py | import pytest
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
| import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
| Add a test for getting an empty bucket | Add a test for getting an empty bucket
| Python | mit | prophile/jacquard,prophile/jacquard | import pytest
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
Add a test for getting an empty bucket | import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
| <commit_before>import pytest
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
<commit_msg>Add a test for getting an empty bucket<commit_after> | import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
| import pytest
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
Add a test for getting an empty bucketimport pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
| <commit_before>import pytest
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
<commit_msg>Add a test for getting an empty bucket<commit_after>import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
|
6c4e94f1133c9c9cd18b97a386f04f56b229f9a8 | las_reader/las2excel.py | las_reader/las2excel.py | try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las-filename')
parser.add_argument('xls-filename')
return parser
if __name__ == '__main__':
main() | try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
print args.__dict__.keys()
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las_filename')
parser.add_argument('xls_filename')
return parser
if __name__ == '__main__':
main() | Fix Namespace for cmd line args | Fix Namespace for cmd line args
| Python | mit | kinverarity1/las-reader,Kramer477/lasio,kinverarity1/lasio,VelizarVESSELINOV/las-reader,kwinkunks/lasio | try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las-filename')
parser.add_argument('xls-filename')
return parser
if __name__ == '__main__':
main()Fix Namespace for cmd line args | try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
print args.__dict__.keys()
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las_filename')
parser.add_argument('xls_filename')
return parser
if __name__ == '__main__':
main() | <commit_before>try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las-filename')
parser.add_argument('xls-filename')
return parser
if __name__ == '__main__':
main()<commit_msg>Fix Namespace for cmd line args<commit_after> | try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
print args.__dict__.keys()
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las_filename')
parser.add_argument('xls_filename')
return parser
if __name__ == '__main__':
main() | try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las-filename')
parser.add_argument('xls-filename')
return parser
if __name__ == '__main__':
main()Fix Namespace for cmd line argstry:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
print args.__dict__.keys()
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las_filename')
parser.add_argument('xls_filename')
return parser
if __name__ == '__main__':
main() | <commit_before>try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las-filename')
parser.add_argument('xls-filename')
return parser
if __name__ == '__main__':
main()<commit_msg>Fix Namespace for cmd line args<commit_after>try:
import argparse
except ImportError:
argparse = None
import sys
import core
def main():
if argparse:
args = get_parser().parse_args(sys.argv[1:])
print args.__dict__.keys()
lasfn = args.las_filename
xlsfn = args.xls_filename
else:
if len(sys.argv >= 3):
lasfn = sys.argv[1]
xlsfn = sys.argv[2]
else:
print('Convert LAS file to Excel.\n\n'
'Usage:\n\n'
'las2excel.py example.las output.xls')
sys.exit(1)
las = core.LASFile(lasfn)
converter = core.ExcelConverter(las)
converter.write_excel(xlsfn)
def get_parser():
parser = argparse.ArgumentParser('Convert LAS file to Excel')
parser.add_argument('las_filename')
parser.add_argument('xls_filename')
return parser
if __name__ == '__main__':
main() |
ccb774b58ab7dbe704abfb7df3fa29915fad8f8f | examples/memnn/download.py | examples/memnn/download.py | #!/usr/bin/env python
from six.moves.urllib import request
def main():
opener = request.FancyURLopener()
opener.addheaders = [('User-Agent', '')]
opener.retrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from six.moves.urllib import request
def main():
request.urlretrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
| Replace deprecated URLopener in `donwload.py` | Replace deprecated URLopener in `donwload.py`
| Python | mit | niboshi/chainer,keisuke-umezawa/chainer,wkentaro/chainer,wkentaro/chainer,pfnet/chainer,keisuke-umezawa/chainer,wkentaro/chainer,niboshi/chainer,niboshi/chainer,okuta/chainer,okuta/chainer,chainer/chainer,hvy/chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,okuta/chainer,keisuke-umezawa/chainer,hvy/chainer,hvy/chainer,tkerola/chainer,hvy/chainer,chainer/chainer,chainer/chainer,okuta/chainer,niboshi/chainer | #!/usr/bin/env python
from six.moves.urllib import request
def main():
opener = request.FancyURLopener()
opener.addheaders = [('User-Agent', '')]
opener.retrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
Replace deprecated URLopener in `donwload.py` | #!/usr/bin/env python
from six.moves.urllib import request
def main():
request.urlretrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
from six.moves.urllib import request
def main():
opener = request.FancyURLopener()
opener.addheaders = [('User-Agent', '')]
opener.retrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
<commit_msg>Replace deprecated URLopener in `donwload.py`<commit_after> | #!/usr/bin/env python
from six.moves.urllib import request
def main():
request.urlretrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from six.moves.urllib import request
def main():
opener = request.FancyURLopener()
opener.addheaders = [('User-Agent', '')]
opener.retrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
Replace deprecated URLopener in `donwload.py`#!/usr/bin/env python
from six.moves.urllib import request
def main():
request.urlretrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
from six.moves.urllib import request
def main():
opener = request.FancyURLopener()
opener.addheaders = [('User-Agent', '')]
opener.retrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
<commit_msg>Replace deprecated URLopener in `donwload.py`<commit_after>#!/usr/bin/env python
from six.moves.urllib import request
def main():
request.urlretrieve(
'http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz',
'tasks_1-20_v1-2.tar.gz')
if __name__ == '__main__':
main()
|
1a1600b0cd27d5e004be344574901c64cdd6f7a2 | scripts/imgtool/__init__.py | scripts/imgtool/__init__.py | # Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.6.0"
| # Copyright 2017-2020 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.7.0a1"
| Change imgtool version to 1.7.0a1 | Change imgtool version to 1.7.0a1
Signed-off-by: Ihor Slabkyy <5b878c9a28a92b9cb7e9988086921fcb7ae33592@cypress.com>
| Python | apache-2.0 | utzig/mcuboot,tamban01/mcuboot,utzig/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,tamban01/mcuboot,runtimeco/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot,utzig/mcuboot,tamban01/mcuboot,tamban01/mcuboot,ATmobica/mcuboot,runtimeco/mcuboot,tamban01/mcuboot,utzig/mcuboot,utzig/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot | # Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.6.0"
Change imgtool version to 1.7.0a1
Signed-off-by: Ihor Slabkyy <5b878c9a28a92b9cb7e9988086921fcb7ae33592@cypress.com> | # Copyright 2017-2020 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.7.0a1"
| <commit_before># Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.6.0"
<commit_msg>Change imgtool version to 1.7.0a1
Signed-off-by: Ihor Slabkyy <5b878c9a28a92b9cb7e9988086921fcb7ae33592@cypress.com><commit_after> | # Copyright 2017-2020 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.7.0a1"
| # Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.6.0"
Change imgtool version to 1.7.0a1
Signed-off-by: Ihor Slabkyy <5b878c9a28a92b9cb7e9988086921fcb7ae33592@cypress.com># Copyright 2017-2020 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.7.0a1"
| <commit_before># Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.6.0"
<commit_msg>Change imgtool version to 1.7.0a1
Signed-off-by: Ihor Slabkyy <5b878c9a28a92b9cb7e9988086921fcb7ae33592@cypress.com><commit_after># Copyright 2017-2020 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.7.0a1"
|
361ebc774fba5489c1911ac40dde4828f6cbd374 | flysight_manager/report.py | flysight_manager/report.py | #!/usr/bin/env python
import log
from jinja2 import Template
import traceback
class Report(object):
def __init__(self):
self.logs = log.LogAggregator.new()
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'],
content)
| #!/usr/bin/env python
import log
import time
from jinja2 import Template
import traceback
class Report(object):
TIME_FMT = ": %y/%m/%d %H:%M %z (%Z)"
def __init__(self):
self.logs = log.LogAggregator.new()
self.started = time.strftime(TIME_FMT)
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'] + self.started,
content)
| Include the time in the email | Include the time in the email
| Python | mit | richo/flysight-manager,richo/flysight-manager | #!/usr/bin/env python
import log
from jinja2 import Template
import traceback
class Report(object):
def __init__(self):
self.logs = log.LogAggregator.new()
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'],
content)
Include the time in the email | #!/usr/bin/env python
import log
import time
from jinja2 import Template
import traceback
class Report(object):
TIME_FMT = ": %y/%m/%d %H:%M %z (%Z)"
def __init__(self):
self.logs = log.LogAggregator.new()
self.started = time.strftime(TIME_FMT)
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'] + self.started,
content)
| <commit_before>#!/usr/bin/env python
import log
from jinja2 import Template
import traceback
class Report(object):
def __init__(self):
self.logs = log.LogAggregator.new()
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'],
content)
<commit_msg>Include the time in the email<commit_after> | #!/usr/bin/env python
import log
import time
from jinja2 import Template
import traceback
class Report(object):
TIME_FMT = ": %y/%m/%d %H:%M %z (%Z)"
def __init__(self):
self.logs = log.LogAggregator.new()
self.started = time.strftime(TIME_FMT)
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'] + self.started,
content)
| #!/usr/bin/env python
import log
from jinja2 import Template
import traceback
class Report(object):
def __init__(self):
self.logs = log.LogAggregator.new()
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'],
content)
Include the time in the email#!/usr/bin/env python
import log
import time
from jinja2 import Template
import traceback
class Report(object):
TIME_FMT = ": %y/%m/%d %H:%M %z (%Z)"
def __init__(self):
self.logs = log.LogAggregator.new()
self.started = time.strftime(TIME_FMT)
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'] + self.started,
content)
| <commit_before>#!/usr/bin/env python
import log
from jinja2 import Template
import traceback
class Report(object):
def __init__(self):
self.logs = log.LogAggregator.new()
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'],
content)
<commit_msg>Include the time in the email<commit_after>#!/usr/bin/env python
import log
import time
from jinja2 import Template
import traceback
class Report(object):
TIME_FMT = ": %y/%m/%d %H:%M %z (%Z)"
def __init__(self):
self.logs = log.LogAggregator.new()
self.started = time.strftime(TIME_FMT)
def format_exception_as_reason(exc):
return traceback.format_exc(exc)
@log.make_loggable
class UploadReport(Report):
TEMPLATE_FILENAME = 'templates/uploader_report.jinja2'
def __init__(self, mailer, mail_cfg):
self.files = []
self.mailer = mailer
self.mail_cfg = mail_cfg
self.reason = None
super(UploadReport, self).__init__()
def add_uploaded_file(self, filename):
self.files.append(filename)
def finish_with_exception(self, exc):
reason = format_exception_as_reason(exc)
self.finish
def finish(self, reason):
self.reason = reason
def render(self):
tpl = Template(open(self.TEMPLATE_FILENAME).read())
return tpl.render(
reason=self.reason,
files=self.files,
logs=self.logs
)
def send(self):
content = self.render()
self.mailer.mail(
self.mail_cfg['to'],
self.mail_cfg['from'],
self.mail_cfg['subject'] + self.started,
content)
|
e2919039a20a255232fefe3b78e173587710baf0 | cla_backend/apps/core/middleware.py | cla_backend/apps/core/middleware.py | from django.http import Http404
from django_statsd.clients import statsd
class GraphiteMiddleware(object):
def process_response(self, request, response):
statsd.incr("response.%s" % response.status_code)
return response
def process_exception(self, request, exception):
if not isinstance(exception, Http404):
statsd.incr("response.500")
| class GraphiteMiddleware(object):
def process_response(self, request, response):
return response
| Remove statsd code and resultant redundant code | Remove statsd code and resultant redundant code | Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | from django.http import Http404
from django_statsd.clients import statsd
class GraphiteMiddleware(object):
def process_response(self, request, response):
statsd.incr("response.%s" % response.status_code)
return response
def process_exception(self, request, exception):
if not isinstance(exception, Http404):
statsd.incr("response.500")
Remove statsd code and resultant redundant code | class GraphiteMiddleware(object):
def process_response(self, request, response):
return response
| <commit_before>from django.http import Http404
from django_statsd.clients import statsd
class GraphiteMiddleware(object):
def process_response(self, request, response):
statsd.incr("response.%s" % response.status_code)
return response
def process_exception(self, request, exception):
if not isinstance(exception, Http404):
statsd.incr("response.500")
<commit_msg>Remove statsd code and resultant redundant code<commit_after> | class GraphiteMiddleware(object):
def process_response(self, request, response):
return response
| from django.http import Http404
from django_statsd.clients import statsd
class GraphiteMiddleware(object):
def process_response(self, request, response):
statsd.incr("response.%s" % response.status_code)
return response
def process_exception(self, request, exception):
if not isinstance(exception, Http404):
statsd.incr("response.500")
Remove statsd code and resultant redundant codeclass GraphiteMiddleware(object):
def process_response(self, request, response):
return response
| <commit_before>from django.http import Http404
from django_statsd.clients import statsd
class GraphiteMiddleware(object):
def process_response(self, request, response):
statsd.incr("response.%s" % response.status_code)
return response
def process_exception(self, request, exception):
if not isinstance(exception, Http404):
statsd.incr("response.500")
<commit_msg>Remove statsd code and resultant redundant code<commit_after>class GraphiteMiddleware(object):
def process_response(self, request, response):
return response
|
6784c455cf93c16237661d6d9fed6af06726a880 | conveyor/processor.py | conveyor/processor.py | from __future__ import absolute_import
from __future__ import division
import collections
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
| from __future__ import absolute_import
from __future__ import division
import collections
import slumber
import slumber.exceptions
import xmlrpc2.client
class BaseProcessor(object):
def __init__(self, index, warehouse, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
wargs, wkwargs = warehouse
self.client = xmlrpc2.client.Client(index)
self.warehouse = slumber.API(*wargs, **wkwargs)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
| Switch to more obvious imports | Switch to more obvious imports
| Python | bsd-2-clause | crateio/carrier | from __future__ import absolute_import
from __future__ import division
import collections
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
Switch to more obvious imports | from __future__ import absolute_import
from __future__ import division
import collections
import slumber
import slumber.exceptions
import xmlrpc2.client
class BaseProcessor(object):
def __init__(self, index, warehouse, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
wargs, wkwargs = warehouse
self.client = xmlrpc2.client.Client(index)
self.warehouse = slumber.API(*wargs, **wkwargs)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
| <commit_before>from __future__ import absolute_import
from __future__ import division
import collections
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
<commit_msg>Switch to more obvious imports<commit_after> | from __future__ import absolute_import
from __future__ import division
import collections
import slumber
import slumber.exceptions
import xmlrpc2.client
class BaseProcessor(object):
def __init__(self, index, warehouse, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
wargs, wkwargs = warehouse
self.client = xmlrpc2.client.Client(index)
self.warehouse = slumber.API(*wargs, **wkwargs)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
| from __future__ import absolute_import
from __future__ import division
import collections
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
Switch to more obvious importsfrom __future__ import absolute_import
from __future__ import division
import collections
import slumber
import slumber.exceptions
import xmlrpc2.client
class BaseProcessor(object):
def __init__(self, index, warehouse, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
wargs, wkwargs = warehouse
self.client = xmlrpc2.client.Client(index)
self.warehouse = slumber.API(*wargs, **wkwargs)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
| <commit_before>from __future__ import absolute_import
from __future__ import division
import collections
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
<commit_msg>Switch to more obvious imports<commit_after>from __future__ import absolute_import
from __future__ import division
import collections
import slumber
import slumber.exceptions
import xmlrpc2.client
class BaseProcessor(object):
def __init__(self, index, warehouse, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
wargs, wkwargs = warehouse
self.client = xmlrpc2.client.Client(index)
self.warehouse = slumber.API(*wargs, **wkwargs)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
versions = self.client.package_releases(name, True)
else:
versions = [version]
for version in versions:
item = self.client.release_data(name, version)
url = self.client.release_urls(item["name"], item["version"])
if isinstance(url, collections.Mapping):
urls = [url]
elif isinstance(url, collections.Iterable):
urls = url
else:
raise RuntimeError("Do not understand the type returned by release_urls")
item.update({"files": urls})
yield item
class BulkProcessor(BaseProcessor):
def process(self):
pass
|
59becaccfc93e1d737be26483a18e9de1dfe1db1 | openedx/core/release.py | openedx/core/release.py | """
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "hawthorn"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
| """
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "master"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
| Revert "Set the RELEASE_LINE to hawthorn" | Revert "Set the RELEASE_LINE to hawthorn"
This reverts commit f44ac32bc9060cfa5ea6ce4284ce7f15b466be28.
| Python | agpl-3.0 | appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform | """
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "hawthorn"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
Revert "Set the RELEASE_LINE to hawthorn"
This reverts commit f44ac32bc9060cfa5ea6ce4284ce7f15b466be28. | """
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "master"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
| <commit_before>"""
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "hawthorn"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
<commit_msg>Revert "Set the RELEASE_LINE to hawthorn"
This reverts commit f44ac32bc9060cfa5ea6ce4284ce7f15b466be28.<commit_after> | """
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "master"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
| """
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "hawthorn"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
Revert "Set the RELEASE_LINE to hawthorn"
This reverts commit f44ac32bc9060cfa5ea6ce4284ce7f15b466be28."""
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "master"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
| <commit_before>"""
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "hawthorn"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
<commit_msg>Revert "Set the RELEASE_LINE to hawthorn"
This reverts commit f44ac32bc9060cfa5ea6ce4284ce7f15b466be28.<commit_after>"""
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "master"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
|
2f56f7dccbc3c9fc416200160bd8616a5e4ab954 | barf/barf/__init__.py | barf/barf/__init__.py | # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
level=logging.DEBUG
)
| # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
filemode='w',
level=logging.DEBUG
)
| Set logging in overwrite mode | Set logging in overwrite mode
| Python | bsd-2-clause | cnheitman/barf-project,chubbymaggie/barf-project,programa-stic/barf-project,programa-stic/barf-project,cnheitman/barf-project,chubbymaggie/barf-project,chubbymaggie/barf-project,cnheitman/barf-project | # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
level=logging.DEBUG
)
Set logging in overwrite mode | # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
filemode='w',
level=logging.DEBUG
)
| <commit_before># Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
level=logging.DEBUG
)
<commit_msg>Set logging in overwrite mode<commit_after> | # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
filemode='w',
level=logging.DEBUG
)
| # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
level=logging.DEBUG
)
Set logging in overwrite mode# Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
filemode='w',
level=logging.DEBUG
)
| <commit_before># Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
level=logging.DEBUG
)
<commit_msg>Set logging in overwrite mode<commit_after># Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from barf import BARF
import analysis
import core
import arch
# Setup logging module.
logging.basicConfig(
filename="barf.log",
format="%(asctime)s: %(name)s:%(levelname)s: %(message)s",
filemode='w',
level=logging.DEBUG
)
|
1d63f615ac58cc8c548cdd8e359694355e5b1843 | portal/forms.py | portal/forms.py | from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
| from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class BootstrapForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for visible in self.visible_fields():
visible.field.widget.attrs['class'] = 'form-control'
visible.field.widget.attrs['placeholder'] = 'Please enter value'
class UserForm(BootstrapForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
| Add BootstrapForm to beautify form_template | Add BootstrapForm to beautify form_template
| Python | mit | huangsam/chowist,huangsam/chowist,huangsam/chowist | from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
Add BootstrapForm to beautify form_template | from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class BootstrapForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for visible in self.visible_fields():
visible.field.widget.attrs['class'] = 'form-control'
visible.field.widget.attrs['placeholder'] = 'Please enter value'
class UserForm(BootstrapForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
| <commit_before>from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
<commit_msg>Add BootstrapForm to beautify form_template<commit_after> | from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class BootstrapForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for visible in self.visible_fields():
visible.field.widget.attrs['class'] = 'form-control'
visible.field.widget.attrs['placeholder'] = 'Please enter value'
class UserForm(BootstrapForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
| from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
Add BootstrapForm to beautify form_templatefrom django.contrib.auth.models import User
from django import forms
# Create your forms here.
class BootstrapForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for visible in self.visible_fields():
visible.field.widget.attrs['class'] = 'form-control'
visible.field.widget.attrs['placeholder'] = 'Please enter value'
class UserForm(BootstrapForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
| <commit_before>from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
<commit_msg>Add BootstrapForm to beautify form_template<commit_after>from django.contrib.auth.models import User
from django import forms
# Create your forms here.
class BootstrapForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for visible in self.visible_fields():
visible.field.widget.attrs['class'] = 'form-control'
visible.field.widget.attrs['placeholder'] = 'Please enter value'
class UserForm(BootstrapForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
|
d0b9824fe29e9c5772c0fc2838e2c6e373013819 | portal/forms.py | portal/forms.py | from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
| from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
| Fix indents on form module | Fix indents on form module
| Python | mit | huangsam/chowist,huangsam/chowist,huangsam/chowist | from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
Fix indents on form module | from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
| <commit_before>from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
<commit_msg>Fix indents on form module<commit_after> | from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
| from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
Fix indents on form modulefrom django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
| <commit_before>from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
<commit_msg>Fix indents on form module<commit_after>from django import forms
from django.contrib.auth.models import User
from portal.models import Profile
# Create your forms here.
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email', 'password']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['user', 'bio', 'address', 'birth_date']
|
6b880f3c783e6a278906b8da2aabea29bb106252 | thinc/neural/_classes/resnet.py | thinc/neural/_classes/resnet.py | from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
def Residual(layer):
def residual_fwd(X, drop=0.):
y, bp_y = layer.begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
model = layerize(residual_fwd)
model._layers.append(layer)
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
model.on_data_hooks.append(on_data)
return model
| from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
return X + self._layers[0](X)
def begin_update(self, X, drop=0.):
y, bp_y = self._layer[0].begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
| Add predict path for Residual | Add predict path for Residual
| Python | mit | spacy-io/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,explosion/thinc | from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
def Residual(layer):
def residual_fwd(X, drop=0.):
y, bp_y = layer.begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
model = layerize(residual_fwd)
model._layers.append(layer)
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
model.on_data_hooks.append(on_data)
return model
Add predict path for Residual | from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
return X + self._layers[0](X)
def begin_update(self, X, drop=0.):
y, bp_y = self._layer[0].begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
| <commit_before>from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
def Residual(layer):
def residual_fwd(X, drop=0.):
y, bp_y = layer.begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
model = layerize(residual_fwd)
model._layers.append(layer)
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
model.on_data_hooks.append(on_data)
return model
<commit_msg>Add predict path for Residual<commit_after> | from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
return X + self._layers[0](X)
def begin_update(self, X, drop=0.):
y, bp_y = self._layer[0].begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
| from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
def Residual(layer):
def residual_fwd(X, drop=0.):
y, bp_y = layer.begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
model = layerize(residual_fwd)
model._layers.append(layer)
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
model.on_data_hooks.append(on_data)
return model
Add predict path for Residualfrom .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
return X + self._layers[0](X)
def begin_update(self, X, drop=0.):
y, bp_y = self._layer[0].begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
| <commit_before>from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
def Residual(layer):
def residual_fwd(X, drop=0.):
y, bp_y = layer.begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
model = layerize(residual_fwd)
model._layers.append(layer)
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
model.on_data_hooks.append(on_data)
return model
<commit_msg>Add predict path for Residual<commit_after>from .model import Model
from ...api import layerize
from .affine import Affine
import cytoolz as toolz
class Residual(Model):
def __init__(self, layer):
Model.__init__(self)
self._layers.append(layer)
self.on_data_hooks.append(on_data)
def __call__(self, X):
return X + self._layers[0](X)
def begin_update(self, X, drop=0.):
y, bp_y = self._layer[0].begin_update(X, drop=drop)
output = X+y
def residual_bwd(d_output, sgd=None):
return d_output + bp_y(d_output, sgd)
return output, residual_bwd
def on_data(self, X, y=None):
for layer in self._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
if hasattr(layer, 'W'):
layer.W.fill(0)
|
105ac0020dbc60fe57da7db75fb82cf872a0834d | crm_switzerland/models/res_partner.py | crm_switzerland/models/res_partner.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
| FIX bug when sending notification to multiple partners | FIX bug when sending notification to multiple partners
| Python | agpl-3.0 | ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
FIX bug when sending notification to multiple partners | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
<commit_msg>FIX bug when sending notification to multiple partners<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
FIX bug when sending notification to multiple partners# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
<commit_msg>FIX bug when sending notification to multiple partners<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class ResPartner(models.Model):
_inherit = 'res.partner'
@api.multi
def schedule_meeting(self):
old_action = super(ResPartner, self).schedule_meeting()
new_action = self.env.ref(
'crm_switzerland.action_calendar_event_partner').read()[0]
new_action['domain'] = [('partner_ids', 'in', self.ids)]
new_action['context'] = {
'default_partner_ids': old_action['context'][
'default_partner_ids']
}
return new_action
@api.model
def _notify_prepare_template_context(self, message):
# modification of context for lang
message = message.with_context(lang=self[:1].lang or self.env.lang)
return super(ResPartner, self).\
_notify_prepare_template_context(message)
|
bb8d2fa458dd565b88db4e2185062f641864e990 | tornado/test/httpserver_test.py | tornado/test/httpserver_test.py | #!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
| #!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
try:
import ssl
except ImportError:
ssl = None
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
if ssl is None:
# Don't try to run ssl tests if we don't have the ssl module
del SSLTest
| Disable SSL test on python 2.5 | Disable SSL test on python 2.5
| Python | apache-2.0 | bywbilly/tornado,AlphaStaxLLC/tornado,felixonmars/tornado,jarrahwu/tornado,BencoLee/tornado,MjAbuz/tornado,shaohung001/tornado,sunjeammy/tornado,zhuochenKIDD/tornado,VShangxiao/tornado,LTD-Beget/tornado,Snamint/tornado,304471720/tornado,Callwoola/tornado,shashankbassi92/tornado,InverseLina/tornado,Batterfii/tornado,cyrusin/tornado,gwillem/tornado,bywbilly/tornado,erichuang1994/tornado,AlphaStaxLLC/tornado,liqueur/tornado,mlyundin/tornado,zguangyu/tornado,fengsp/tornado,0x73/tornado,ColorFuzzy/tornado,ms7s/tornado,shashankbassi92/tornado,ListFranz/tornado,bywbilly/tornado,BencoLee/tornado,wsyzxcn/tornado,ms7s/tornado,mivade/tornado,eXcomm/tornado,kippandrew/tornado,nbargnesi/tornado,304471720/tornado,eXcomm/tornado,yuyangit/tornado,mehmetkose/tornado,nephics/tornado,leekchan/tornado_test,noxiouz/tornado,jonashagstedt/tornado,Lancher/tornado,pombredanne/tornado,erichuang1994/tornado,304471720/tornado,z-fork/tornado,ubear/tornado,kevinge314gh/tornado,ovidiucp/tornado,wechasing/tornado,hhru/tornado,jarrahwu/tornado,coderhaoxin/tornado,VShangxiao/tornado,304471720/tornado,fengsp/tornado,yuezhonghua/tornado,elelianghh/tornado,kaushik94/tornado,erichuang1994/tornado,hzruandd/tornado,erichuang1994/tornado,MjAbuz/tornado,Fydot/tornado,nephics/tornado,andyaguiar/tornado,Aaron1992/tornado,dongpinglai/my_tornado,legnaleurc/tornado,chenxiaba/tornado,arthurdarcet/tornado,ColorFuzzy/tornado,ms7s/tornado,yuyangit/tornado,elelianghh/tornado,jsjohnst/tornado,futurechallenger/tornado,gwillem/tornado,Drooids/tornado,codeb2cc/tornado,leekchan/tornado_test,mlyundin/tornado,allenl203/tornado,mivade/tornado,akalipetis/tornado,elijah513/tornado,ovidiucp/tornado,chenxiaba/tornado,noxiouz/tornado,shaohung001/tornado,ydaniv/tornado,jsjohnst/tornado,zguangyu/tornado,hzruandd/tornado,dongpinglai/my_tornado,kippandrew/tornado,johan--/tornado,lujinda/tornado,lujinda/tornado,jarrahwu/tornado,Geoion/tornado,hhru/tornado,elijah513/tornado,LTD-Beget/tornado,ajdavis/tornado,xiny
u7/tornado,icejoywoo/tornado,Snamint/tornado,whip112/tornado,Acidburn0zzz/tornado,xinyu7/tornado,shashankbassi92/tornado,djt5019/tornado,Windsooon/tornado,kangbiao/tornado,ajdavis/tornado,Lancher/tornado,Snamint/tornado,tianyk/tornado-research,jehiah/tornado,yangkf1985/tornado,hzruandd/tornado,nordaux/tornado,jparise/tornado,BencoLee/tornado,felixonmars/tornado,nbargnesi/tornado,BencoLee/tornado,zguangyu/tornado,ubear/tornado,zhuochenKIDD/tornado,mehmetkose/tornado,fengshao0907/tornado,wujuguang/tornado,Fydot/tornado,jarrahwu/tornado,liqueur/tornado,hzruandd/tornado,ZhuPeng/tornado,jehiah/tornado,codecov/tornado,VShangxiao/tornado,wujuguang/tornado,sevenguin/tornado,z-fork/tornado,eXcomm/tornado,obsh/tornado,Polyconseil/tornado,tianyk/tornado-research,leekchan/tornado_test,ovidiucp/tornado,djt5019/tornado,ymero/tornado,takeshineshiro/tornado,tornadoweb/tornado,johan--/tornado,lsanotes/tornado,mehmetkose/tornado,johan--/tornado,noxiouz/tornado,futurechallenger/tornado,bufferx/tornado,elelianghh/tornado,gwillem/tornado,gitchs/tornado,bdarnell/tornado,lilydjwg/tornado,gitchs/tornado,futurechallenger/tornado,Windsooon/tornado,kaushik94/tornado,mivade/tornado,ajdavis/tornado,liqueur/tornado,AlphaStaxLLC/tornado,jparise/tornado,wsyzxcn/tornado,yuezhonghua/tornado,z-fork/tornado,djt5019/tornado,MjAbuz/tornado,gwillem/tornado,dongpinglai/my_tornado,Aaron1992/tornado,wxhzk/tornado-1,ydaniv/tornado,ColorFuzzy/tornado,sxfmol/tornado,hhru/tornado,Snamint/tornado,LTD-Beget/tornado,sevenguin/tornado,Windsooon/tornado,ydaniv/tornado,zhuochenKIDD/tornado,kevinge314gh/tornado,NoyaInRain/tornado,cyrilMargaria/tornado,ovidiucp/tornado,jonashagstedt/tornado,zhuochenKIDD/tornado,elijah513/tornado,Windsooon/tornado,BencoLee/tornado,Polyconseil/tornado,mr-ping/tornado,Acidburn0zzz/tornado,QuanZag/tornado,frtmelody/tornado,mlyundin/tornado,codeb2cc/tornado,liqueur/tornado,coderhaoxin/tornado,legnaleurc/tornado,kippandrew/tornado,ListFranz/tornado,pombredanne/tornado,fengshao0907/tornado,ke
vinge314gh/tornado,NoyaInRain/tornado,gitchs/tornado,akalipetis/tornado,sunjeammy/tornado,wxhzk/tornado-1,leekchan/tornado_test,QuanZag/tornado,gitchs/tornado,dsseter/tornado,fengsp/tornado,AlphaStaxLLC/tornado,codecov/tornado,nbargnesi/tornado,futurechallenger/tornado,cyrilMargaria/tornado,NoyaInRain/tornado,fengshao0907/tornado,anandology/tornado,mr-ping/tornado,lujinda/tornado,leekchan/tornado_test,SuminAndrew/tornado,anandology/tornado,0xkag/tornado,mr-ping/tornado,frtmelody/tornado,icejoywoo/tornado,QuanZag/tornado,wsyzxcn/tornado,eklitzke/tornado,elijah513/tornado,insflow/tornado,hzruandd/tornado,anjan-srivastava/tornado,jehiah/tornado,Aaron1992/tornado,dongpinglai/my_tornado,Geoion/tornado,arthurdarcet/tornado,arthurdarcet/tornado,kangbiao/tornado,Batterfii/tornado,noxiouz/tornado,lilydjwg/tornado,nordaux/tornado,codeb2cc/tornado,arthurdarcet/tornado,Lancher/tornado,insflow/tornado,Callwoola/tornado,anjan-srivastava/tornado,jampp/tornado,InverseLina/tornado,Acidburn0zzz/tornado,andyaguiar/tornado,mehmetkose/tornado,mr-ping/tornado,tornadoweb/tornado,kangbiao/tornado,dongpinglai/my_tornado,304471720/tornado,Polyconseil/tornado,shaohung001/tornado,SuminAndrew/tornado,elijah513/tornado,mivade/tornado,wechasing/tornado,yangkf1985/tornado,NoyaInRain/tornado,xinyu7/tornado,jonashagstedt/tornado,hzruandd/tornado,drewmiller/tornado,obsh/tornado,jparise/tornado,nephics/tornado,andyaguiar/tornado,ymero/tornado,lilydjwg/tornado,ymero/tornado,lujinda/tornado,coderhaoxin/tornado,nbargnesi/tornado,whip112/tornado,lsanotes/tornado,jarrahwu/tornado,ifduyue/tornado,NoyaInRain/tornado,sevenguin/tornado,0xkag/tornado,allenl203/tornado,codeb2cc/tornado,jampp/tornado,lsanotes/tornado,anandology/tornado,xinyu7/tornado,bdarnell/tornado,mehmetkose/tornado,allenl203/tornado,noxiouz/tornado,lsanotes/tornado,sxfmol/tornado,frtmelody/tornado,Polyconseil/tornado,wxhzk/tornado-1,drewmiller/tornado,Snamint/tornado,z-fork/tornado,dsseter/tornado,zguangyu/tornado,bywbilly/tornado,whip112/tor
nado,Batterfii/tornado,cyrusin/tornado,legnaleurc/tornado,wsyzxcn/tornado,pombredanne/tornado,obsh/tornado,Polyconseil/tornado,xinyu7/tornado,0x73/tornado,wxhzk/tornado-1,ifduyue/tornado,QuanZag/tornado,mlyundin/tornado,cyrilMargaria/tornado,liqueur/tornado,eklitzke/tornado,eXcomm/tornado,ListFranz/tornado,tianyk/tornado-research,andyaguiar/tornado,Batterfii/tornado,ovidiucp/tornado,takeshineshiro/tornado,chenxiaba/tornado,Snamint/tornado,felixonmars/tornado,0xkag/tornado,bufferx/tornado,ColorFuzzy/tornado,ms7s/tornado,wxhzk/tornado-1,wujuguang/tornado,jarrahwu/tornado,Batterfii/tornado,chenxiaba/tornado,SuminAndrew/tornado,yuezhonghua/tornado,legnaleurc/tornado,eXcomm/tornado,chenxiaba/tornado,codecov/tornado,whip112/tornado,sxfmol/tornado,whip112/tornado,LTD-Beget/tornado,insflow/tornado,elelianghh/tornado,Drooids/tornado,NoyaInRain/tornado,eklitzke/tornado,takeshineshiro/tornado,Geoion/tornado,ydaniv/tornado,sevenguin/tornado,0xkag/tornado,Drooids/tornado,mlyundin/tornado,zhuochenKIDD/tornado,chenxiaba/tornado,kippandrew/tornado,mehmetkose/tornado,futurechallenger/tornado,hhru/tornado,anjan-srivastava/tornado,djt5019/tornado,ubear/tornado,arthurdarcet/tornado,nbargnesi/tornado,takeshineshiro/tornado,dsseter/tornado,Fydot/tornado,coderhaoxin/tornado,icejoywoo/tornado,cyrilMargaria/tornado,cyrusin/tornado,fengshao0907/tornado,frtmelody/tornado,felixonmars/tornado,VShangxiao/tornado,Drooids/tornado,drewmiller/tornado,akalipetis/tornado,kangbiao/tornado,Fydot/tornado,jonashagstedt/tornado,bdarnell/tornado,coderhaoxin/tornado,SuminAndrew/tornado,ubear/tornado,InverseLina/tornado,bywbilly/tornado,0x73/tornado,jsjohnst/tornado,Geoion/tornado,icejoywoo/tornado,lujinda/tornado,Acidburn0zzz/tornado,anandology/tornado,anjan-srivastava/tornado,shashankbassi92/tornado,kevinge314gh/tornado,anandology/tornado,gitchs/tornado,tornadoweb/tornado,LTD-Beget/tornado,gitchs/tornado,zguangyu/tornado,insflow/tornado,zguangyu/tornado,elelianghh/tornado,Fydot/tornado,zhuochenKIDD/tornado,
ColorFuzzy/tornado,Lancher/tornado,dsseter/tornado,z-fork/tornado,obsh/tornado,elelianghh/tornado,sxfmol/tornado,sunjeammy/tornado,icejoywoo/tornado,allenl203/tornado,johan--/tornado,lilydjwg/tornado,sunjeammy/tornado,akalipetis/tornado,Callwoola/tornado,drewmiller/tornado,yangkf1985/tornado,ifduyue/tornado,cyrilMargaria/tornado,akalipetis/tornado,fengsp/tornado,ymero/tornado,codeb2cc/tornado,wujuguang/tornado,SuminAndrew/tornado,jehiah/tornado,anjan-srivastava/tornado,Drooids/tornado,Batterfii/tornado,mivade/tornado,legnaleurc/tornado,ifduyue/tornado,ZhuPeng/tornado,jparise/tornado,304471720/tornado,wechasing/tornado,ymero/tornado,andyaguiar/tornado,ifduyue/tornado,wechasing/tornado,jparise/tornado,bufferx/tornado,erichuang1994/tornado,ms7s/tornado,eklitzke/tornado,bufferx/tornado,InverseLina/tornado,importcjj/tornado,VShangxiao/tornado,MjAbuz/tornado,yuyangit/tornado,bdarnell/tornado,nephics/tornado,anandology/tornado,sunjeammy/tornado,yangkf1985/tornado,nephics/tornado,Callwoola/tornado,importcjj/tornado,nordaux/tornado,InverseLina/tornado,allenl203/tornado,sevenguin/tornado,cyrusin/tornado,z-fork/tornado,Acidburn0zzz/tornado,shashankbassi92/tornado,nbargnesi/tornado,tianyk/tornado-research,eXcomm/tornado,ajdavis/tornado,kaushik94/tornado,insflow/tornado,ymero/tornado,ydaniv/tornado,importcjj/tornado,takeshineshiro/tornado,whip112/tornado,hhru/tornado,obsh/tornado,wechasing/tornado,cyrilMargaria/tornado,fengsp/tornado,MjAbuz/tornado,QuanZag/tornado,Drooids/tornado,futurechallenger/tornado,VShangxiao/tornado,mr-ping/tornado,lujinda/tornado,dsseter/tornado,sxfmol/tornado,codeb2cc/tornado,wujuguang/tornado,bdarnell/tornado,kippandrew/tornado,yuezhonghua/tornado,takeshineshiro/tornado,jsjohnst/tornado,kangbiao/tornado,AlphaStaxLLC/tornado,noxiouz/tornado,mr-ping/tornado,LTD-Beget/tornado,shaohung001/tornado,gwillem/tornado,drewmiller/tornado,jampp/tornado,dongpinglai/my_tornado,kangbiao/tornado,ColorFuzzy/tornado,kippandrew/tornado,jampp/tornado,frtmelody/tornado,fen
gshao0907/tornado,liqueur/tornado,wsyzxcn/tornado,kevinge314gh/tornado,ZhuPeng/tornado,kaushik94/tornado,ListFranz/tornado,ZhuPeng/tornado,cyrusin/tornado,djt5019/tornado,sevenguin/tornado,Lancher/tornado,jparise/tornado,xinyu7/tornado,pombredanne/tornado,wxhzk/tornado-1,lsanotes/tornado,yangkf1985/tornado,importcjj/tornado,elijah513/tornado,shaohung001/tornado,0x73/tornado,Windsooon/tornado,MjAbuz/tornado,ajdavis/tornado,ZhuPeng/tornado,fengsp/tornado,erichuang1994/tornado,arthurdarcet/tornado,QuanZag/tornado,gwillem/tornado,yuyangit/tornado,lsanotes/tornado,InverseLina/tornado,wsyzxcn/tornado,coderhaoxin/tornado,Callwoola/tornado,johan--/tornado,kaushik94/tornado,sxfmol/tornado,ZhuPeng/tornado,0xkag/tornado,jampp/tornado,obsh/tornado,Geoion/tornado,importcjj/tornado,ListFranz/tornado,jsjohnst/tornado,yuezhonghua/tornado,pombredanne/tornado,jehiah/tornado,Fydot/tornado,BencoLee/tornado,icejoywoo/tornado,importcjj/tornado,fengshao0907/tornado,ubear/tornado,tianyk/tornado-research,Aaron1992/tornado,bywbilly/tornado,shashankbassi92/tornado,drewmiller/tornado,ms7s/tornado,Polyconseil/tornado,0x73/tornado,kevinge314gh/tornado,dsseter/tornado,pombredanne/tornado,akalipetis/tornado,frtmelody/tornado,felixonmars/tornado,jsjohnst/tornado,tornadoweb/tornado,Callwoola/tornado,wsyzxcn/tornado,jampp/tornado,Acidburn0zzz/tornado,nordaux/tornado,djt5019/tornado,Windsooon/tornado,nordaux/tornado,anjan-srivastava/tornado,eklitzke/tornado,johan--/tornado,ListFranz/tornado,yuyangit/tornado,Geoion/tornado,cyrusin/tornado,jonashagstedt/tornado,mlyundin/tornado,shaohung001/tornado,insflow/tornado,ubear/tornado,Aaron1992/tornado,bufferx/tornado,yangkf1985/tornado,yuezhonghua/tornado,ydaniv/tornado,wechasing/tornado,AlphaStaxLLC/tornado,andyaguiar/tornado,codecov/tornado,ovidiucp/tornado | #!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
Disable SSL test on python 2.5 | #!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
try:
import ssl
except ImportError:
ssl = None
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
if ssl is None:
# Don't try to run ssl tests if we don't have the ssl module
del SSLTest
| <commit_before>#!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
<commit_msg>Disable SSL test on python 2.5<commit_after> | #!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
try:
import ssl
except ImportError:
ssl = None
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
if ssl is None:
# Don't try to run ssl tests if we don't have the ssl module
del SSLTest
| #!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
Disable SSL test on python 2.5#!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
try:
import ssl
except ImportError:
ssl = None
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
if ssl is None:
# Don't try to run ssl tests if we don't have the ssl module
del SSLTest
| <commit_before>#!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
<commit_msg>Disable SSL test on python 2.5<commit_after>#!/usr/bin/env python
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.web import Application, RequestHandler
import os
import pycurl
import re
import unittest
import urllib
try:
import ssl
except ImportError:
ssl = None
class HelloWorldRequestHandler(RequestHandler):
def get(self):
self.finish("Hello world")
class SSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
test_dir = os.path.dirname(__file__)
return dict(ssl_options=dict(
certfile=os.path.join(test_dir, 'test.crt'),
keyfile=os.path.join(test_dir, 'test.key')))
def test_ssl(self):
def disable_cert_check(curl):
# Our certificate was not signed by a CA, so don't check it
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
self.http_client.fetch(self.get_url('/').replace('http', 'https'),
self.stop,
prepare_curl_callback=disable_cert_check)
response = self.wait()
self.assertEqual(response.body, "Hello world")
if ssl is None:
# Don't try to run ssl tests if we don't have the ssl module
del SSLTest
|
39cc30f2f6c74d3a506c5d1a46cf0ccc6377b80f | pylibscrypt/__init__.py | pylibscrypt/__init__.py |
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, get the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
| Use pyscrypt.py in package import if libscrypt isn't available | Use pyscrypt.py in package import if libscrypt isn't available
| Python | isc | jvarho/pylibscrypt,jvarho/pylibscrypt |
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, get the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
Use pyscrypt.py in package import if libscrypt isn't available |
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
| <commit_before>
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, get the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
<commit_msg>Use pyscrypt.py in package import if libscrypt isn't available<commit_after> |
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, get the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
Use pyscrypt.py in package import if libscrypt isn't available
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
| <commit_before>
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, get the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
<commit_msg>Use pyscrypt.py in package import if libscrypt isn't available<commit_after>
# First, try loading libscrypt
_done = False
try:
from pylibscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work, try the scrypt module
if not _done:
try:
from pyscrypt import *
except ImportError:
pass
else:
_done = True
# If that didn't work either, the inlined Python version
if not _done:
try:
from pypyscrypt_inline import *
except ImportError:
pass
else:
_done = True
# Finally the non-inlined
if not _done:
from pypyscrypt import *
|
00aa59468c4dbfde282891f1396e29bd3f28fb62 | gunny/reveille/service.py | gunny/reveille/service.py | from twisted.application import internet
from twisted.application.service import Service
from twisted.internet import reactor
from autobahn.websocket import connectWS
class ControlService(Service):
pass
class PlayerService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
self.factory.startFactory()
self.conn = connectWS(self.factory)
self.running = 1
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = 0
| from twisted.application.service import Service
from twisted.internet import stdio
from autobahn.websocket import connectWS
class CoxswainService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
#self.factory(ReveilleCommandProtocol())
self.conn = connectWS(self.factory)
self.running = True
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = False
| Rename classes to reflect intended use. | Rename classes to reflect intended use.
| Python | bsd-2-clause | davidblewett/gunny,davidblewett/gunny | from twisted.application import internet
from twisted.application.service import Service
from twisted.internet import reactor
from autobahn.websocket import connectWS
class ControlService(Service):
pass
class PlayerService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
self.factory.startFactory()
self.conn = connectWS(self.factory)
self.running = 1
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = 0
Rename classes to reflect intended use. | from twisted.application.service import Service
from twisted.internet import stdio
from autobahn.websocket import connectWS
class CoxswainService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
#self.factory(ReveilleCommandProtocol())
self.conn = connectWS(self.factory)
self.running = True
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = False
| <commit_before>from twisted.application import internet
from twisted.application.service import Service
from twisted.internet import reactor
from autobahn.websocket import connectWS
class ControlService(Service):
pass
class PlayerService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
self.factory.startFactory()
self.conn = connectWS(self.factory)
self.running = 1
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = 0
<commit_msg>Rename classes to reflect intended use.<commit_after> | from twisted.application.service import Service
from twisted.internet import stdio
from autobahn.websocket import connectWS
class CoxswainService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
#self.factory(ReveilleCommandProtocol())
self.conn = connectWS(self.factory)
self.running = True
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = False
| from twisted.application import internet
from twisted.application.service import Service
from twisted.internet import reactor
from autobahn.websocket import connectWS
class ControlService(Service):
pass
class PlayerService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
self.factory.startFactory()
self.conn = connectWS(self.factory)
self.running = 1
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = 0
Rename classes to reflect intended use.from twisted.application.service import Service
from twisted.internet import stdio
from autobahn.websocket import connectWS
class CoxswainService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
#self.factory(ReveilleCommandProtocol())
self.conn = connectWS(self.factory)
self.running = True
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = False
| <commit_before>from twisted.application import internet
from twisted.application.service import Service
from twisted.internet import reactor
from autobahn.websocket import connectWS
class ControlService(Service):
pass
class PlayerService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
self.factory.startFactory()
self.conn = connectWS(self.factory)
self.running = 1
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = 0
<commit_msg>Rename classes to reflect intended use.<commit_after>from twisted.application.service import Service
from twisted.internet import stdio
from autobahn.websocket import connectWS
class CoxswainService(Service):
def __init__(self, factory):
self.factory = factory
self.conn = None
def startService(self):
#self.factory(ReveilleCommandProtocol())
self.conn = connectWS(self.factory)
self.running = True
def stopService(self):
self.factory.stopFactory()
if self.conn is not None:
self.conn.disconnect()
self.running = False
|
7df1ed120281c82d166fa1c2218def4c84b48a3d | alfred_db/migrations/versions/30c0aec2ca06_improve_repository_o.py | alfred_db/migrations/versions/30c0aec2ca06_improve_repository_o.py | """Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', u'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column(u'user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
| """Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', 'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column('user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
| Remove unicode string markers which are removed in python3 | Remove unicode string markers which are removed in python3
| Python | isc | alfredhq/alfred-db | """Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', u'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column(u'user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
Remove unicode string markers which are removed in python3 | """Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', 'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column('user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
| <commit_before>"""Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', u'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column(u'user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
<commit_msg>Remove unicode string markers which are removed in python3<commit_after> | """Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', 'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column('user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
| """Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', u'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column(u'user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
Remove unicode string markers which are removed in python3"""Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', 'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column('user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
| <commit_before>"""Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', u'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column(u'user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
<commit_msg>Remove unicode string markers which are removed in python3<commit_after>"""Improve repository owner information
Revision ID: 30c0aec2ca06
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:45:05.241933
"""
# revision identifiers, used by Alembic.
revision = '30c0aec2ca06'
down_revision = '4fdf1059c4ba'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'repositories',
sa.Column('owner_type', sa.Enum('organization', 'user', native_enum=False),
nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_name', sa.String(), nullable=False)
)
op.add_column(
'repositories',
sa.Column('owner_id', sa.Integer(), nullable=False)
)
op.drop_column('repositories', 'user')
op.create_unique_constraint(
"uq_owner_type_owner_name",
"repositories",
["owner_type", "owner_name"],
)
def downgrade():
op.add_column(
'repositories',
sa.Column('user', sa.String(), nullable=False)
)
op.drop_constraint('uq_owner_type_owner_name', 'repositories', 'unique')
op.drop_column('repositories', 'owner_id')
op.drop_column('repositories', 'owner_name')
op.drop_column('repositories', 'owner_type')
|
9938678e05270c06d328aeb466ab827bab232e3a | solar_neighbourhood/prepare_data_add_kinematics.py | solar_neighbourhood/prepare_data_add_kinematics.py | """
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = Table.read('../data/ScoCen_box_result.fits')
d = Table.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d)) | """
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = '../data/ScoCen_box_result.fits')
d = tabletool.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d)) | Convert entire table to cartesian | Convert entire table to cartesian
| Python | mit | mikeireland/chronostar,mikeireland/chronostar,mikeireland/chronostar,mikeireland/chronostar | """
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = Table.read('../data/ScoCen_box_result.fits')
d = Table.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d))Convert entire table to cartesian | """
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = '../data/ScoCen_box_result.fits')
d = tabletool.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d)) | <commit_before>"""
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = Table.read('../data/ScoCen_box_result.fits')
d = Table.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d))<commit_msg>Convert entire table to cartesian<commit_after> | """
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = '../data/ScoCen_box_result.fits')
d = tabletool.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d)) | """
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = Table.read('../data/ScoCen_box_result.fits')
d = Table.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d))Convert entire table to cartesian"""
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = '../data/ScoCen_box_result.fits')
d = tabletool.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d)) | <commit_before>"""
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = Table.read('../data/ScoCen_box_result.fits')
d = Table.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d))<commit_msg>Convert entire table to cartesian<commit_after>"""
Add very large RV errors for stars with no known RVs.
Convert to cartesian.
"""
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar import tabletool
from astropy.table import Table
datafile = '../data/ScoCen_box_result.fits')
d = tabletool.read(datafile)
# Set missing radial velocities (nan) to 0
d['radial_velocity'] = np.nan_to_num(d['radial_velocity'])
# Set missing radial velocity errors (nan) to 1e+10
d['radial_velocity_error'][np.isnan(d['radial_velocity_error'])] = 1e+4
print('Convert to cartesian')
tabletool.convert_table_astro2cart(table=d, return_table=True)
d.write('../data/ScoCen_box_result_15M_ready_for_bg_ols.fits')
print('Cartesian written.', len(d)) |
364d83c8add1fdde679aa2823ae94ad7f264cb48 | raco/relation_key.py | raco/relation_key.py | """Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
| """Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(str(self))
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
| Add hash function to RelationKey | Add hash function to RelationKey
| Python | bsd-3-clause | uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco | """Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
Add hash function to RelationKey | """Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(str(self))
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
| <commit_before>"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
<commit_msg>Add hash function to RelationKey<commit_after> | """Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(str(self))
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
| """Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
Add hash function to RelationKey"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(str(self))
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
| <commit_before>"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
<commit_msg>Add hash function to RelationKey<commit_after>"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(str(self))
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
args = {'relation' : toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
|
93a23b3aed48da6953914036ae488c5b3ab891c7 | scikits/audiolab/soundio/alsa.py | scikits/audiolab/soundio/alsa.py | import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
try:
dev = Device(samplerate = samplerate, channels = nc)
assert nc == dev.channels
assert input.dtype == np.float32 or input.dtype == np.float64
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
| import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
if not input.dtype in (np.float32, np.float64):
raise ValueError("input should be array of float32 or float64 !")
try:
dev = Device(samplerate = samplerate, channels = nc)
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
| Check input dtype before creating pcm device. | Check input dtype before creating pcm device.
| Python | lgpl-2.1 | cournape/audiolab,cournape/audiolab,cournape/audiolab | import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
try:
dev = Device(samplerate = samplerate, channels = nc)
assert nc == dev.channels
assert input.dtype == np.float32 or input.dtype == np.float64
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
Check input dtype before creating pcm device. | import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
if not input.dtype in (np.float32, np.float64):
raise ValueError("input should be array of float32 or float64 !")
try:
dev = Device(samplerate = samplerate, channels = nc)
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
| <commit_before>import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
try:
dev = Device(samplerate = samplerate, channels = nc)
assert nc == dev.channels
assert input.dtype == np.float32 or input.dtype == np.float64
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
<commit_msg>Check input dtype before creating pcm device.<commit_after> | import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
if not input.dtype in (np.float32, np.float64):
raise ValueError("input should be array of float32 or float64 !")
try:
dev = Device(samplerate = samplerate, channels = nc)
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
| import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
try:
dev = Device(samplerate = samplerate, channels = nc)
assert nc == dev.channels
assert input.dtype == np.float32 or input.dtype == np.float64
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
Check input dtype before creating pcm device.import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
if not input.dtype in (np.float32, np.float64):
raise ValueError("input should be array of float32 or float64 !")
try:
dev = Device(samplerate = samplerate, channels = nc)
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
| <commit_before>import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
try:
dev = Device(samplerate = samplerate, channels = nc)
assert nc == dev.channels
assert input.dtype == np.float32 or input.dtype == np.float64
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
<commit_msg>Check input dtype before creating pcm device.<commit_after>import numpy as np
from _alsa import card_name, card_indexes, asoundlib_version
from _alsa import Device, AlsaException
def play(input, samplerate = 48000):
if input.ndim == 1:
n = input.size
nc = 1
elif input.ndim == 2:
n, nc = input.shape
else:
raise ValueError("Only ndim 1 or 2 supported")
if not input.dtype in (np.float32, np.float64):
raise ValueError("input should be array of float32 or float64 !")
try:
dev = Device(samplerate = samplerate, channels = nc)
dev.play_short((16384 * input).astype(np.int16))
except AlsaException, e:
raise IOError(str(e))
if __name__ == '__main__':
print "Asoundlib version is", asoundlib_version()
for i in card_indexes():
print card_name(i)
dev = Device()
print "Device name:", dev.name
a = 0.2 * np.random.randn(4e4)
play(a, 16000)
play(a, 8000)
play(a, 22050)
|
9c650cb3fb37e8c96ef9642af553ce77a28a1587 | problem-static/Intro-Eval_50/admin/eval.py | problem-static/Intro-Eval_50/admin/eval.py | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() | Move welcome message to outside the loop | Move welcome message to outside the loop
| Python | mit | james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()Move welcome message to outside the loop | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() | <commit_before>#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()<commit_msg>Move welcome message to outside the loop<commit_after> | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()Move welcome message to outside the loop#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() | <commit_before>#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()<commit_msg>Move welcome message to outside the loop<commit_after>#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() |
1bbffc2152ea1c48b47153005beeb2974b682f3c | bot/actions/action.py | bot/actions/action.py | from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
map(func, self.actions)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| Fix for_each incorrectly using lazy map operator | Fix for_each incorrectly using lazy map operator
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot | from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
map(func, self.actions)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
Fix for_each incorrectly using lazy map operator | from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| <commit_before>from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
map(func, self.actions)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
<commit_msg>Fix for_each incorrectly using lazy map operator<commit_after> | from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
map(func, self.actions)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
Fix for_each incorrectly using lazy map operatorfrom bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| <commit_before>from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
map(func, self.actions)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
<commit_msg>Fix for_each incorrectly using lazy map operator<commit_after>from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
|
eb7ff9cec9360af0b5c18915164a54d4755e657b | mistraldashboard/dashboards/mistral/executions/tables.py | mistraldashboard/dashboards/mistral/executions/tables.py | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
action = tables.Column("action", verbose_name=_("Action"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
| # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
parameters = tables.Column("parameters", verbose_name=_("Parameters"))
output = tables.Column("output", verbose_name=_("Output"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
| Add Task's output and parameters columns | Add Task's output and parameters columns
Change-Id: I98f57a6a0178bb7258d82f3a165127f060f42f7b
Implements: blueprint mistral-ui
| Python | apache-2.0 | openstack/mistral-dashboard,openstack/mistral-dashboard,openstack/mistral-dashboard | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
action = tables.Column("action", verbose_name=_("Action"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
Add Task's output and parameters columns
Change-Id: I98f57a6a0178bb7258d82f3a165127f060f42f7b
Implements: blueprint mistral-ui | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
parameters = tables.Column("parameters", verbose_name=_("Parameters"))
output = tables.Column("output", verbose_name=_("Output"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
action = tables.Column("action", verbose_name=_("Action"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
<commit_msg>Add Task's output and parameters columns
Change-Id: I98f57a6a0178bb7258d82f3a165127f060f42f7b
Implements: blueprint mistral-ui<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
parameters = tables.Column("parameters", verbose_name=_("Parameters"))
output = tables.Column("output", verbose_name=_("Output"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
| # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
action = tables.Column("action", verbose_name=_("Action"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
Add Task's output and parameters columns
Change-Id: I98f57a6a0178bb7258d82f3a165127f060f42f7b
Implements: blueprint mistral-ui# -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
parameters = tables.Column("parameters", verbose_name=_("Parameters"))
output = tables.Column("output", verbose_name=_("Output"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
action = tables.Column("action", verbose_name=_("Action"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
<commit_msg>Add Task's output and parameters columns
Change-Id: I98f57a6a0178bb7258d82f3a165127f060f42f7b
Implements: blueprint mistral-ui<commit_after># -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class ExecutionsTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("ID"),
link=("horizon:mistral:executions:tasks"))
wb_name = tables.Column("workbook_name", verbose_name=_("Workbook"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "executions"
verbose_name = _("Executions")
class TaskTable(tables.DataTable):
id = tables.Column("id", verbose_name=_("ID"))
name = tables.Column("name", verbose_name=_("Name"))
parameters = tables.Column("parameters", verbose_name=_("Parameters"))
output = tables.Column("output", verbose_name=_("Output"))
state = tables.Column("state", verbose_name=_("State"))
class Meta:
name = "tasks"
verbose_name = _("Tasks")
|
557cf0bc733c49e973a12bd14fb596af6a7fb5ff | refugeedata/admin.py | refugeedata/admin.py | from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
return super(BatchAdmin, self).save_related(
request, form, formsets, change)
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
| from django.core.management import call_command
from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
super(BatchAdmin, self).save_related(request, form, formsets, change)
call_command("export_card_data", str(form.instance.id), "--save")
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
| Call export_card_data on batch save | Call export_card_data on batch save
| Python | mit | ukch/refugeedata,ukch/refugeedata,ukch/refugeedata,ukch/refugeedata | from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
return super(BatchAdmin, self).save_related(
request, form, formsets, change)
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
Call export_card_data on batch save | from django.core.management import call_command
from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
super(BatchAdmin, self).save_related(request, form, formsets, change)
call_command("export_card_data", str(form.instance.id), "--save")
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
| <commit_before>from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
return super(BatchAdmin, self).save_related(
request, form, formsets, change)
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
<commit_msg>Call export_card_data on batch save<commit_after> | from django.core.management import call_command
from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
super(BatchAdmin, self).save_related(request, form, formsets, change)
call_command("export_card_data", str(form.instance.id), "--save")
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
| from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
return super(BatchAdmin, self).save_related(
request, form, formsets, change)
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
Call export_card_data on batch savefrom django.core.management import call_command
from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
super(BatchAdmin, self).save_related(request, form, formsets, change)
call_command("export_card_data", str(form.instance.id), "--save")
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
| <commit_before>from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
return super(BatchAdmin, self).save_related(
request, form, formsets, change)
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
<commit_msg>Call export_card_data on batch save<commit_after>from django.core.management import call_command
from django.contrib import admin
from refugeedata import models, forms
class NumberAdmin(admin.ModelAdmin):
list_display = ("number", "short_id", "active")
class BatchAdmin(admin.ModelAdmin):
list_display = ("registration_number_format", "data_file")
def get_form(self, request, obj=None, **kwargs):
if not obj:
kwargs["form"] = forms.BatchAdminForm
return super(BatchAdmin, self).get_form(request, obj=obj, **kwargs)
def save_related(self, request, form, formsets, change):
if not change: # create
numbers = form.cleaned_data["registration_numbers"]
models.RegistrationNumber.objects.bulk_create(numbers)
form.cleaned_data["registration_numbers"] = (
models.RegistrationNumber.objects.filter(
id__in=[n.id for n in numbers]))
super(BatchAdmin, self).save_related(request, form, formsets, change)
call_command("export_card_data", str(form.instance.id), "--save")
class LanguageAdmin(admin.ModelAdmin):
list_display = ("iso_code", "description", "example_text")
admin.site.register(models.RegistrationNumber, NumberAdmin)
admin.site.register(models.RegistrationCardBatch, BatchAdmin)
admin.site.register(models.Language, LanguageAdmin)
|
a4fa3b9866ac9712f029c7cabe64121f80875207 | biobox_cli/main.py | biobox_cli/main.py | """
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys, string
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, string.strip),
F(filter, fn.is_not_empty)])
| """
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, str.strip),
F(filter, fn.is_not_empty)])
| Use str methods instead of string module | Use str methods instead of string module
| Python | mit | bioboxes/command-line-interface,pbelmann/command-line-interface,michaelbarton/command-line-interface,michaelbarton/command-line-interface,bioboxes/command-line-interface,pbelmann/command-line-interface | """
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys, string
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, string.strip),
F(filter, fn.is_not_empty)])
Use str methods instead of string module | """
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, str.strip),
F(filter, fn.is_not_empty)])
| <commit_before>"""
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys, string
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, string.strip),
F(filter, fn.is_not_empty)])
<commit_msg>Use str methods instead of string module<commit_after> | """
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, str.strip),
F(filter, fn.is_not_empty)])
| """
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys, string
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, string.strip),
F(filter, fn.is_not_empty)])
Use str methods instead of string module"""
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, str.strip),
F(filter, fn.is_not_empty)])
| <commit_before>"""
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys, string
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, string.strip),
F(filter, fn.is_not_empty)])
<commit_msg>Use str methods instead of string module<commit_after>"""
biobox - A command line interface for running biobox Docker containers
Usage:
biobox <command> <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
-v, --version Show version.
Commands:
run Run a biobox Docker image with input parameters
verify Verify that a Docker image matches the given specification type
login Log in to a biobox container with mounted test data
Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import sys
from fn import F
import biobox_cli.util.misc as util
import biobox_cli.util.functional as fn
def run():
args = input_args()
opts = util.parse_docopt(__doc__, args, True)
util.select_module("command", opts["<command>"]).run(args)
def input_args():
"""
Get command line args excluding those consisting of only whitespace
"""
return fn.thread([
sys.argv[1:],
F(map, str.strip),
F(filter, fn.is_not_empty)])
|
a6804dd0baefbbd9681edc2f0ba0ec13e84f5cc3 | nimp/utilities/paths.py | nimp/utilities/paths.py | # -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
| # -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if path is None:
return None
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
| Fix case where path is None in sanitize_path | Fix case where path is None in sanitize_path
| Python | mit | dontnod/nimp | # -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
Fix case where path is None in sanitize_path | # -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if path is None:
return None
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
| <commit_before># -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
<commit_msg>Fix case where path is None in sanitize_path<commit_after> | # -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if path is None:
return None
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
| # -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
Fix case where path is None in sanitize_path# -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if path is None:
return None
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
| <commit_before># -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
<commit_msg>Fix case where path is None in sanitize_path<commit_after># -*- coding: utf-8 -*-
import os
import os.path
import sys
import fnmatch
import glob
from nimp.utilities.logging import *
from nimp.utilities.system import *
#-------------------------------------------------------------------------------
def split_path(path):
splitted_path = []
while True:
(path, folder) = os.path.split(path)
if folder != "":
splitted_path.insert(0, folder)
else:
if path != "":
splitted_path.insert(0, path)
break
return splitted_path
def sanitize_path(path):
if path is None:
return None
if is_windows() and not is_msys():
if path[0:1] == '/' and path[1:2].isalpha() and path[2:3] == '/':
return '%s:\\%s' % (path[1], path[3:].replace('/', '\\'))
if os.sep is '\\':
return path.replace('/', '\\')
# elif os.sep is '/':
return path.replace('\\', '/')
#-------------------------------------------------------------------------------
# This function is necessary because Python’s makedirs cannot create a
# directory such as "d:\data\foo/bar" because it’ll split it as "d:\data"
# and "foo/bar" then try to create a directory named "foo/bar".
def safe_makedirs(path):
path = sanitize_path(path)
try:
os.makedirs(path)
except FileExistsError:
# Maybe someone else created the directory for us; if so, ignore error
if os.path.exists(path):
return
raise
|
65c22394fad7929a7de1e78be7569a2895915dc9 | protocols/admin.py | protocols/admin.py | from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
| from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdminIndex(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
class TopicAdminIndex(admin.ModelAdmin):
list_display = ['name', 'voted_for', 'voted_against', 'voted_abstain', 'protocol']
list_filter = ['protocol__number']
search_fields =['name', 'protocol__number']
admin.site.register(Institution)
admin.site.register(Topic, TopicAdminIndex)
admin.site.register(Protocol, ProtocolAdminIndex)
| Add Topics index page customization | Add Topics index page customization
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
Add Topics index page customization | from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdminIndex(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
class TopicAdminIndex(admin.ModelAdmin):
list_display = ['name', 'voted_for', 'voted_against', 'voted_abstain', 'protocol']
list_filter = ['protocol__number']
search_fields =['name', 'protocol__number']
admin.site.register(Institution)
admin.site.register(Topic, TopicAdminIndex)
admin.site.register(Protocol, ProtocolAdminIndex)
| <commit_before>from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
<commit_msg>Add Topics index page customization<commit_after> | from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdminIndex(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
class TopicAdminIndex(admin.ModelAdmin):
list_display = ['name', 'voted_for', 'voted_against', 'voted_abstain', 'protocol']
list_filter = ['protocol__number']
search_fields =['name', 'protocol__number']
admin.site.register(Institution)
admin.site.register(Topic, TopicAdminIndex)
admin.site.register(Protocol, ProtocolAdminIndex)
| from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
Add Topics index page customizationfrom django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdminIndex(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
class TopicAdminIndex(admin.ModelAdmin):
list_display = ['name', 'voted_for', 'voted_against', 'voted_abstain', 'protocol']
list_filter = ['protocol__number']
search_fields =['name', 'protocol__number']
admin.site.register(Institution)
admin.site.register(Topic, TopicAdminIndex)
admin.site.register(Protocol, ProtocolAdminIndex)
| <commit_before>from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdmin(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
admin.site.register(Institution)
admin.site.register(Topic)
admin.site.register(Protocol, ProtocolAdmin)
<commit_msg>Add Topics index page customization<commit_after>from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdminIndex(admin.ModelAdmin):
list_display = ['number', 'start_time', 'get_topics', 'information', 'majority', 'current_majority', 'institution']
list_display_links = ['number']
list_filter = ['institution__name', 'topics']
search_fields =['number', 'institution__name', 'topics__name', 'information']
class TopicAdminIndex(admin.ModelAdmin):
list_display = ['name', 'voted_for', 'voted_against', 'voted_abstain', 'protocol']
list_filter = ['protocol__number']
search_fields =['name', 'protocol__number']
admin.site.register(Institution)
admin.site.register(Topic, TopicAdminIndex)
admin.site.register(Protocol, ProtocolAdminIndex)
|
22326bdd9265d8ae97055cbcc1f64939dd6bfcda | reviewboard/notifications/templatetags/markdown_email.py | reviewboard/notifications/templatetags/markdown_email.py | from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
| from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
| Remove a legacy Markdown extension when generating e-mails. | Remove a legacy Markdown extension when generating e-mails.
The recent updates for using Python-Markdown 3.x removed the
`smart_strong` extension from the main Markdown procssing, but failed to
remove it for the list of extensions used in e-mails. This is a trivial
change that simply removes that entry.
| Python | mit | reviewboard/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,chipx86/reviewboard | from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
Remove a legacy Markdown extension when generating e-mails.
The recent updates for using Python-Markdown 3.x removed the
`smart_strong` extension from the main Markdown procssing, but failed to
remove it for the list of extensions used in e-mails. This is a trivial
change that simply removes that entry. | from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
| <commit_before>from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
<commit_msg>Remove a legacy Markdown extension when generating e-mails.
The recent updates for using Python-Markdown 3.x removed the
`smart_strong` extension from the main Markdown procssing, but failed to
remove it for the list of extensions used in e-mails. This is a trivial
change that simply removes that entry.<commit_after> | from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
| from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
Remove a legacy Markdown extension when generating e-mails.
The recent updates for using Python-Markdown 3.x removed the
`smart_strong` extension from the main Markdown procssing, but failed to
remove it for the list of extensions used in e-mails. This is a trivial
change that simply removes that entry.from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
| <commit_before>from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
<commit_msg>Remove a legacy Markdown extension when generating e-mails.
The recent updates for using Python-Markdown 3.x removed the
`smart_strong` extension from the main Markdown procssing, but failed to
remove it for the list of extensions used in e-mails. This is a trivial
change that simply removes that entry.<commit_after>from __future__ import unicode_literals
import markdown
from django import template
from django.utils.safestring import mark_safe
from djblets.markdown import markdown_unescape
register = template.Library()
@register.filter
def markdown_email_html(text, is_rich_text):
if not is_rich_text:
return text
# We use XHTML1 instead of HTML5 to ensure the results can be parsed by
# an XML parser. This is actually needed for the main Markdown renderer
# for the web UI, but consistency is good here.
return mark_safe(markdown.markdown(
text,
output_format='xhtml1',
extensions=[
'markdown.extensions.fenced_code',
'markdown.extensions.codehilite',
'markdown.extensions.tables',
'markdown.extensions.sane_lists',
'pymdownx.tilde',
'djblets.markdown.extensions.escape_html',
'djblets.markdown.extensions.wysiwyg_email',
],
extension_configs={
'codehilite': {
'noclasses': True,
},
}))
@register.filter
def markdown_email_text(text, is_rich_text):
if not is_rich_text:
return text
return markdown_unescape(text)
|
681cc1dc53851a2d127b4c00fc4e7d9e54bd8fba | cms/envs/devstack_docker.py | cms/envs/devstack_docker.py | """ Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
| """ Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
| Set LMS_BASE setting for Studio | Set LMS_BASE setting for Studio
This allows previews in LMS to work properly.
ECOM-6634
| Python | agpl-3.0 | jolyonb/edx-platform,ahmedaljazzar/edx-platform,proversity-org/edx-platform,raccoongang/edx-platform,edx/edx-platform,hastexo/edx-platform,fintech-circle/edx-platform,Stanford-Online/edx-platform,eduNEXT/edunext-platform,jolyonb/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,a-parhom/edx-platform,prarthitm/edxplatform,gymnasium/edx-platform,appsembler/edx-platform,kmoocdev2/edx-platform,EDUlib/edx-platform,CredoReference/edx-platform,philanthropy-u/edx-platform,fintech-circle/edx-platform,miptliot/edx-platform,TeachAtTUM/edx-platform,cpennington/edx-platform,arbrandes/edx-platform,miptliot/edx-platform,romain-li/edx-platform,ESOedX/edx-platform,Lektorium-LLC/edx-platform,msegado/edx-platform,gsehub/edx-platform,TeachAtTUM/edx-platform,procangroup/edx-platform,gsehub/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,procangroup/edx-platform,mitocw/edx-platform,teltek/edx-platform,cpennington/edx-platform,CredoReference/edx-platform,msegado/edx-platform,angelapper/edx-platform,lduarte1991/edx-platform,proversity-org/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,msegado/edx-platform,Edraak/edraak-platform,Edraak/edraak-platform,fintech-circle/edx-platform,BehavioralInsightsTeam/edx-platform,philanthropy-u/edx-platform,pepeportela/edx-platform,ahmedaljazzar/edx-platform,edx/edx-platform,msegado/edx-platform,gymnasium/edx-platform,ahmedaljazzar/edx-platform,ESOedX/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,Stanford-Online/edx-platform,teltek/edx-platform,Lektorium-LLC/edx-platform,romain-li/edx-platform,philanthropy-u/edx-platform,CredoReference/edx-platform,a-parhom/edx-platform,cpennington/edx-platform,appsembler/edx-platform,romain-li/edx-platform,edx-solutions/edx-platform,arbrandes/edx-platform,kmoocdev2/edx-platform,pepeportela/edx-platform,BehavioralInsightsTeam/edx-platform,hastexo/edx-platform,teltek/edx-platform,hastexo/edx-platform,gymnasium/edx-platform
,prarthitm/edxplatform,appsembler/edx-platform,romain-li/edx-platform,appsembler/edx-platform,TeachAtTUM/edx-platform,proversity-org/edx-platform,pabloborrego93/edx-platform,eduNEXT/edx-platform,Lektorium-LLC/edx-platform,romain-li/edx-platform,Edraak/edraak-platform,edx/edx-platform,kmoocdev2/edx-platform,pabloborrego93/edx-platform,philanthropy-u/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,gsehub/edx-platform,arbrandes/edx-platform,pepeportela/edx-platform,stvstnfrd/edx-platform,procangroup/edx-platform,Stanford-Online/edx-platform,Lektorium-LLC/edx-platform,kmoocdev2/edx-platform,pepeportela/edx-platform,raccoongang/edx-platform,BehavioralInsightsTeam/edx-platform,prarthitm/edxplatform,edx-solutions/edx-platform,lduarte1991/edx-platform,angelapper/edx-platform,TeachAtTUM/edx-platform,stvstnfrd/edx-platform,lduarte1991/edx-platform,stvstnfrd/edx-platform,ESOedX/edx-platform,raccoongang/edx-platform,Stanford-Online/edx-platform,BehavioralInsightsTeam/edx-platform,a-parhom/edx-platform,hastexo/edx-platform,CredoReference/edx-platform,raccoongang/edx-platform,kmoocdev2/edx-platform,edx/edx-platform,proversity-org/edx-platform,miptliot/edx-platform,mitocw/edx-platform,angelapper/edx-platform,Edraak/edraak-platform,cpennington/edx-platform,teltek/edx-platform,eduNEXT/edunext-platform,fintech-circle/edx-platform,procangroup/edx-platform,edx-solutions/edx-platform,lduarte1991/edx-platform,miptliot/edx-platform,ahmedaljazzar/edx-platform,a-parhom/edx-platform,eduNEXT/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,jolyonb/edx-platform,mitocw/edx-platform,prarthitm/edxplatform,pabloborrego93/edx-platform,gymnasium/edx-platform,EDUlib/edx-platform,pabloborrego93/edx-platform | """ Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
Set LMS_BASE setting for Studio
This allows previews in LMS to work properly.
ECOM-6634 | """ Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
| <commit_before>""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
<commit_msg>Set LMS_BASE setting for Studio
This allows previews in LMS to work properly.
ECOM-6634<commit_after> | """ Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
| """ Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
Set LMS_BASE setting for Studio
This allows previews in LMS to work properly.
ECOM-6634""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
| <commit_before>""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
<commit_msg>Set LMS_BASE setting for Studio
This allows previews in LMS to work properly.
ECOM-6634<commit_after>""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
HOST = 'edx.devstack.edxapp:18000'
SITE_NAME = HOST
LMS_ROOT_URL = 'http://{}:18000'.format(HOST)
# This is the public-facing host used for previews
LMS_BASE = 'localhost:18000'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
|
594cd5d490786bbbdcf877d8c155530c36acd2c1 | src/services/TemperatureMonitor/src/temperature.py | src/services/TemperatureMonitor/src/temperature.py | import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
# smooth the data slightly
history_length = 3
for t in self.temp_history:
if abs(t - temp) > 0.2:
result = self.last_temp
break
self.temp_history.append(temp)
self.temp_history = self.temp_history[0:history_length]
self.last_temp = result
return result | import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result | Remove Smoothing From Temp Sensor | Remove Smoothing From Temp Sensor
| Python | mit | IAPark/PITherm | import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
# smooth the data slightly
history_length = 3
for t in self.temp_history:
if abs(t - temp) > 0.2:
result = self.last_temp
break
self.temp_history.append(temp)
self.temp_history = self.temp_history[0:history_length]
self.last_temp = result
return resultRemove Smoothing From Temp Sensor | import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result | <commit_before>import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
# smooth the data slightly
history_length = 3
for t in self.temp_history:
if abs(t - temp) > 0.2:
result = self.last_temp
break
self.temp_history.append(temp)
self.temp_history = self.temp_history[0:history_length]
self.last_temp = result
return result<commit_msg>Remove Smoothing From Temp Sensor<commit_after> | import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result | import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
# smooth the data slightly
history_length = 3
for t in self.temp_history:
if abs(t - temp) > 0.2:
result = self.last_temp
break
self.temp_history.append(temp)
self.temp_history = self.temp_history[0:history_length]
self.last_temp = result
return resultRemove Smoothing From Temp Sensorimport smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result | <commit_before>import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
# smooth the data slightly
history_length = 3
for t in self.temp_history:
if abs(t - temp) > 0.2:
result = self.last_temp
break
self.temp_history.append(temp)
self.temp_history = self.temp_history[0:history_length]
self.last_temp = result
return result<commit_msg>Remove Smoothing From Temp Sensor<commit_after>import smbus
class TemperatureSensor:
temp_history = []
last_temp = 0
def __init__(self, address):
self.bus = smbus.SMBus(1)
self.address = address
def get_temp(self):
MSB = self.bus.read_byte_data(self.address, 0)
LSB = self.bus.read_byte_data(self.address, 1)
temp = ((MSB << 8 | LSB) >> 4) * 0.0625
result = temp
return result |
70f5a3fd7e28c574912a0318eba83f11789c2c7b | ankieta/contact/urls.py | ankieta/contact/urls.py | from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
| from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^/success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
| Fix URLS in contact form | Fix URLS in contact form
| Python | bsd-3-clause | watchdogpolska/prezydent.siecobywatelska.pl,watchdogpolska/prezydent.siecobywatelska.pl,watchdogpolska/prezydent.siecobywatelska.pl | from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
Fix URLS in contact form | from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^/success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
| <commit_before>from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
<commit_msg>Fix URLS in contact form<commit_after> | from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^/success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
| from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
Fix URLS in contact formfrom django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^/success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
| <commit_before>from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
<commit_msg>Fix URLS in contact form<commit_after>from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(r'^$', views.ContactView.as_view(), name="form"),
url(r'^/success$', TemplateView.as_view(template_name="contact/success.html"), name="success"),
)
|
72d33ea47458cace13dac920ce2a82e55f83caba | statsmodels/stats/tests/test_outliers_influence.py | statsmodels/stats/tests/test_outliers_influence.py | from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
| from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
| Add pandas dataframe capability in variance_inflation_factor | ENH: Add pandas dataframe capability in variance_inflation_factor
| Python | bsd-3-clause | bashtage/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels | from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
ENH: Add pandas dataframe capability in variance_inflation_factor | from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
| <commit_before>from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
<commit_msg>ENH: Add pandas dataframe capability in variance_inflation_factor<commit_after> | from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
| from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
ENH: Add pandas dataframe capability in variance_inflation_factorfrom numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
| <commit_before>from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
<commit_msg>ENH: Add pandas dataframe capability in variance_inflation_factor<commit_after>from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
|
e787e4981441198e2b015b1b4f4971fbc112c78b | cyder/base/eav/utils.py | cyder/base/eav/utils.py | import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
strip_whitespace=True, min_length=0, die=False):
"""Validate a "list" of things
separator: the char that separates list items (None means whitespace)
allow_whitespace: whether to strip whitespace around separators before
validating (unnecessary if separator is None)
Returns whether validator returned True for every item in value. Note that
this is not terribly useful.
"""
items = value.split(separator)
length = len(items)
all_valid = all([validator(x.strip() if strip_whitespace else x)
for x in items])
if not all_valid:
if die:
raise ValidationError("One or more list items are invalid")
else:
return False
elif length < min_length:
if die:
raise ValidationError("List must contain at least {0} items"
.format(length))
else:
return False
else:
return True
def is_hex_byte(value):
return bool(re.match(r'^[0-9a-fA-F]{2}$', value))
def is_hex_byte_sequence(value):
return validate_list(value, _hex_byte, separator=':',
strip_whitespace=False)
def strip_and_get_base(value):
if value.startswith('0x'):
value = value[len('0x'):]
base = 16
else:
base = 10
return (value, base)
| import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
strip_whitespace=True, min_length=0, die=False):
"""Validate a "list" of things
separator: the char that separates list items (None means whitespace)
allow_whitespace: whether to strip whitespace around separators before
validating (unnecessary if separator is None)
Returns whether validator returned True for every item in value. Note that
this is not terribly useful.
"""
items = value.split(separator)
length = len(items)
all_valid = all([validator(x.strip() if strip_whitespace else x)
for x in items])
if not all_valid:
if die:
raise ValidationError("One or more list items are invalid")
else:
return False
elif length < min_length:
if die:
raise ValidationError("List must contain at least {0} items"
.format(length))
else:
return False
else:
return True
def is_hex_byte(value):
return bool(re.match(r'^[0-9a-fA-F]{2}$', value))
def is_hex_byte_sequence(value):
return validate_list(value, is_hex_byte, separator=':',
strip_whitespace=False)
def strip_and_get_base(value):
if value.startswith('0x'):
value = value[len('0x'):]
base = 16
else:
base = 10
return (value, base)
| Fix yet another stupid mistake | Fix yet another stupid mistake
| Python | bsd-3-clause | akeym/cyder,murrown/cyder,murrown/cyder,murrown/cyder,zeeman/cyder,drkitty/cyder,drkitty/cyder,murrown/cyder,akeym/cyder,zeeman/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,OSU-Net/cyder,OSU-Net/cyder,drkitty/cyder,zeeman/cyder,OSU-Net/cyder,zeeman/cyder | import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
strip_whitespace=True, min_length=0, die=False):
"""Validate a "list" of things
separator: the char that separates list items (None means whitespace)
allow_whitespace: whether to strip whitespace around separators before
validating (unnecessary if separator is None)
Returns whether validator returned True for every item in value. Note that
this is not terribly useful.
"""
items = value.split(separator)
length = len(items)
all_valid = all([validator(x.strip() if strip_whitespace else x)
for x in items])
if not all_valid:
if die:
raise ValidationError("One or more list items are invalid")
else:
return False
elif length < min_length:
if die:
raise ValidationError("List must contain at least {0} items"
.format(length))
else:
return False
else:
return True
def is_hex_byte(value):
return bool(re.match(r'^[0-9a-fA-F]{2}$', value))
def is_hex_byte_sequence(value):
return validate_list(value, _hex_byte, separator=':',
strip_whitespace=False)
def strip_and_get_base(value):
if value.startswith('0x'):
value = value[len('0x'):]
base = 16
else:
base = 10
return (value, base)
Fix yet another stupid mistake | import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
strip_whitespace=True, min_length=0, die=False):
"""Validate a "list" of things
separator: the char that separates list items (None means whitespace)
allow_whitespace: whether to strip whitespace around separators before
validating (unnecessary if separator is None)
Returns whether validator returned True for every item in value. Note that
this is not terribly useful.
"""
items = value.split(separator)
length = len(items)
all_valid = all([validator(x.strip() if strip_whitespace else x)
for x in items])
if not all_valid:
if die:
raise ValidationError("One or more list items are invalid")
else:
return False
elif length < min_length:
if die:
raise ValidationError("List must contain at least {0} items"
.format(length))
else:
return False
else:
return True
def is_hex_byte(value):
return bool(re.match(r'^[0-9a-fA-F]{2}$', value))
def is_hex_byte_sequence(value):
return validate_list(value, is_hex_byte, separator=':',
strip_whitespace=False)
def strip_and_get_base(value):
if value.startswith('0x'):
value = value[len('0x'):]
base = 16
else:
base = 10
return (value, base)
| <commit_before>import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
strip_whitespace=True, min_length=0, die=False):
"""Validate a "list" of things
separator: the char that separates list items (None means whitespace)
allow_whitespace: whether to strip whitespace around separators before
validating (unnecessary if separator is None)
Returns whether validator returned True for every item in value. Note that
this is not terribly useful.
"""
items = value.split(separator)
length = len(items)
all_valid = all([validator(x.strip() if strip_whitespace else x)
for x in items])
if not all_valid:
if die:
raise ValidationError("One or more list items are invalid")
else:
return False
elif length < min_length:
if die:
raise ValidationError("List must contain at least {0} items"
.format(length))
else:
return False
else:
return True
def is_hex_byte(value):
return bool(re.match(r'^[0-9a-fA-F]{2}$', value))
def is_hex_byte_sequence(value):
return validate_list(value, _hex_byte, separator=':',
strip_whitespace=False)
def strip_and_get_base(value):
if value.startswith('0x'):
value = value[len('0x'):]
base = 16
else:
base = 10
return (value, base)
<commit_msg>Fix yet another stupid mistake<commit_after> | import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
strip_whitespace=True, min_length=0, die=False):
"""Validate a "list" of things
separator: the char that separates list items (None means whitespace)
allow_whitespace: whether to strip whitespace around separators before
validating (unnecessary if separator is None)
Returns whether validator returned True for every item in value. Note that
this is not terribly useful.
"""
items = value.split(separator)
length = len(items)
all_valid = all([validator(x.strip() if strip_whitespace else x)
for x in items])
if not all_valid:
if die:
raise ValidationError("One or more list items are invalid")
else:
return False
elif length < min_length:
if die:
raise ValidationError("List must contain at least {0} items"
.format(length))
else:
return False
else:
return True
def is_hex_byte(value):
return bool(re.match(r'^[0-9a-fA-F]{2}$', value))
def is_hex_byte_sequence(value):
return validate_list(value, is_hex_byte, separator=':',
strip_whitespace=False)
def strip_and_get_base(value):
if value.startswith('0x'):
value = value[len('0x'):]
base = 16
else:
base = 10
return (value, base)
| import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
                  strip_whitespace=True, min_length=0, die=False):
    """Validate a "list" of things encoded as a delimited string.

    value: the delimited string to validate
    validator: callable applied to each item; truthy result means valid
    separator: the char that separates list items (None means whitespace)
    strip_whitespace: whether to strip whitespace around each item before
        validating (unnecessary if separator is None)
    min_length: minimum number of items the list must contain
    die: if True, raise ValidationError instead of returning False

    Returns True when every item validates and the list is long enough.
    """
    items = value.split(separator)
    all_valid = all(validator(x.strip() if strip_whitespace else x)
                    for x in items)
    if not all_valid:
        if die:
            raise ValidationError("One or more list items are invalid")
        return False
    if len(items) < min_length:
        if die:
            # Report the required minimum; the previous message
            # interpolated the actual (too-small) length, which made the
            # error self-contradictory.
            raise ValidationError("List must contain at least {0} items"
                                  .format(min_length))
        return False
    return True
def is_hex_byte(value):
    """Return True if value is exactly two hexadecimal digits."""
    # \Z (unlike $) does not match before a trailing newline, so a value
    # like 'ab\n' is rejected instead of silently validating.
    return bool(re.match(r'^[0-9a-fA-F]{2}\Z', value))
def is_hex_byte_sequence(value):
    """Return True if value is a ':'-separated sequence of hex bytes.

    Fixes a NameError: the per-item validator is ``is_hex_byte``;
    ``_hex_byte`` is not defined anywhere in this module.
    """
    return validate_list(value, is_hex_byte, separator=':',
                         strip_whitespace=False)
def strip_and_get_base(value):
    """Strip a leading '0x' from value and return a (digits, base) pair.

    base is 16 when the '0x' prefix was present, 10 otherwise.
    """
    if value.startswith('0x'):
        return (value[2:], 16)
    return (value, 10)
Fix yet another stupid mistakeimport re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
                  strip_whitespace=True, min_length=0, die=False):
    """Validate a "list" of things encoded as a delimited string.

    value: the delimited string to validate
    validator: callable applied to each item; truthy result means valid
    separator: the char that separates list items (None means whitespace)
    strip_whitespace: whether to strip whitespace around each item before
        validating (unnecessary if separator is None)
    min_length: minimum number of items the list must contain
    die: if True, raise ValidationError instead of returning False

    Returns True when every item validates and the list is long enough.
    """
    items = value.split(separator)
    all_valid = all(validator(x.strip() if strip_whitespace else x)
                    for x in items)
    if not all_valid:
        if die:
            raise ValidationError("One or more list items are invalid")
        return False
    if len(items) < min_length:
        if die:
            # Report the required minimum; the previous message
            # interpolated the actual (too-small) length, which made the
            # error self-contradictory.
            raise ValidationError("List must contain at least {0} items"
                                  .format(min_length))
        return False
    return True
def is_hex_byte(value):
    """Return True if value is exactly two hexadecimal digits."""
    # \Z (unlike $) does not match before a trailing newline, so a value
    # like 'ab\n' is rejected instead of silently validating.
    return bool(re.match(r'^[0-9a-fA-F]{2}\Z', value))
def is_hex_byte_sequence(value):
    """Return True if value is a ':'-separated sequence of hex bytes."""
    return validate_list(
        value,
        validator=is_hex_byte,
        separator=':',
        strip_whitespace=False,
    )
def strip_and_get_base(value):
    """Strip a leading '0x' from value and return a (digits, base) pair.

    base is 16 when the '0x' prefix was present, 10 otherwise.
    """
    if value.startswith('0x'):
        return (value[2:], 16)
    return (value, 10)
| <commit_before>import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
                  strip_whitespace=True, min_length=0, die=False):
    """Validate a "list" of things encoded as a delimited string.

    value: the delimited string to validate
    validator: callable applied to each item; truthy result means valid
    separator: the char that separates list items (None means whitespace)
    strip_whitespace: whether to strip whitespace around each item before
        validating (unnecessary if separator is None)
    min_length: minimum number of items the list must contain
    die: if True, raise ValidationError instead of returning False

    Returns True when every item validates and the list is long enough.
    """
    items = value.split(separator)
    all_valid = all(validator(x.strip() if strip_whitespace else x)
                    for x in items)
    if not all_valid:
        if die:
            raise ValidationError("One or more list items are invalid")
        return False
    if len(items) < min_length:
        if die:
            # Report the required minimum; the previous message
            # interpolated the actual (too-small) length, which made the
            # error self-contradictory.
            raise ValidationError("List must contain at least {0} items"
                                  .format(min_length))
        return False
    return True
def is_hex_byte(value):
    """Return True if value is exactly two hexadecimal digits."""
    # \Z (unlike $) does not match before a trailing newline, so a value
    # like 'ab\n' is rejected instead of silently validating.
    return bool(re.match(r'^[0-9a-fA-F]{2}\Z', value))
def is_hex_byte_sequence(value):
    """Return True if value is a ':'-separated sequence of hex bytes.

    Fixes a NameError: the per-item validator is ``is_hex_byte``;
    ``_hex_byte`` is not defined anywhere in this module.
    """
    return validate_list(value, is_hex_byte, separator=':',
                         strip_whitespace=False)
def strip_and_get_base(value):
    """Strip a leading '0x' from value and return a (digits, base) pair.

    base is 16 when the '0x' prefix was present, 10 otherwise.
    """
    if value.startswith('0x'):
        return (value[2:], 16)
    return (value, 10)
<commit_msg>Fix yet another stupid mistake<commit_after>import re
from django.core.exceptions import ValidationError
default_validator = lambda x: x != '' # FIXME: Do we need this?
def validate_list(value, validator=default_validator, separator=',',
                  strip_whitespace=True, min_length=0, die=False):
    """Validate a "list" of things encoded as a delimited string.

    value: the delimited string to validate
    validator: callable applied to each item; truthy result means valid
    separator: the char that separates list items (None means whitespace)
    strip_whitespace: whether to strip whitespace around each item before
        validating (unnecessary if separator is None)
    min_length: minimum number of items the list must contain
    die: if True, raise ValidationError instead of returning False

    Returns True when every item validates and the list is long enough.
    """
    items = value.split(separator)
    all_valid = all(validator(x.strip() if strip_whitespace else x)
                    for x in items)
    if not all_valid:
        if die:
            raise ValidationError("One or more list items are invalid")
        return False
    if len(items) < min_length:
        if die:
            # Report the required minimum; the previous message
            # interpolated the actual (too-small) length, which made the
            # error self-contradictory.
            raise ValidationError("List must contain at least {0} items"
                                  .format(min_length))
        return False
    return True
def is_hex_byte(value):
    """Return True if value is exactly two hexadecimal digits."""
    # \Z (unlike $) does not match before a trailing newline, so a value
    # like 'ab\n' is rejected instead of silently validating.
    return bool(re.match(r'^[0-9a-fA-F]{2}\Z', value))
def is_hex_byte_sequence(value):
    """Return True if value is a ':'-separated sequence of hex bytes."""
    return validate_list(
        value,
        validator=is_hex_byte,
        separator=':',
        strip_whitespace=False,
    )
def strip_and_get_base(value):
    """Strip a leading '0x' from value and return a (digits, base) pair.

    base is 16 when the '0x' prefix was present, 10 otherwise.
    """
    if value.startswith('0x'):
        return (value[2:], 16)
    return (value, 10)
|
295fc64b8fac9852e92356f61ff4698e011c798e | seam/util.py | seam/util.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display.

    wait: seconds xvfb-run allows the X server to start
    server_args: arguments forwarded to the Xvfb server
    """
    parts = ['xvfb-run',
             '-a',  # automatically get a free server number
             '-f {}'.format(get_tmp_filename()),
             # Error file gets a distinct .err extension instead of the
             # default .out, so it is not mistaken for the -f file above.
             '-e {}'.format(get_tmp_filename(ext='err')),
             '--wait={:d}'.format(wait),
             '--server-args="{}"'.format(server_args),
             command]
    return ' '.join(parts)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display."""
    out_file = get_tmp_filename()
    err_file = get_tmp_filename(ext='err')
    parts = [
        'xvfb-run',
        '-a',  # automatically get a free server number
        '-f {}'.format(out_file),
        '-e {}'.format(err_file),
        '--wait={:d}'.format(wait),
        '--server-args="{}"'.format(server_args),
        command,
    ]
    return ' '.join(parts)
| Make xvfb e files end with .err | Make xvfb e files end with .err
| Python | mit | VUIIS/seam,VUIIS/seam | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display.

    wait: seconds xvfb-run allows the X server to start
    server_args: arguments forwarded to the Xvfb server
    """
    parts = ['xvfb-run',
             '-a',  # automatically get a free server number
             '-f {}'.format(get_tmp_filename()),
             # Error file gets a distinct .err extension instead of the
             # default .out, so it is not mistaken for the -f file above.
             '-e {}'.format(get_tmp_filename(ext='err')),
             '--wait={:d}'.format(wait),
             '--server-args="{}"'.format(server_args),
             command]
    return ' '.join(parts)
Make xvfb e files end with .err | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display."""
    out_file = get_tmp_filename()
    err_file = get_tmp_filename(ext='err')
    parts = [
        'xvfb-run',
        '-a',  # automatically get a free server number
        '-f {}'.format(out_file),
        '-e {}'.format(err_file),
        '--wait={:d}'.format(wait),
        '--server-args="{}"'.format(server_args),
        command,
    ]
    return ' '.join(parts)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display.

    wait: seconds xvfb-run allows the X server to start
    server_args: arguments forwarded to the Xvfb server
    """
    parts = ['xvfb-run',
             '-a',  # automatically get a free server number
             '-f {}'.format(get_tmp_filename()),
             # Error file gets a distinct .err extension instead of the
             # default .out, so it is not mistaken for the -f file above.
             '-e {}'.format(get_tmp_filename(ext='err')),
             '--wait={:d}'.format(wait),
             '--server-args="{}"'.format(server_args),
             command]
    return ' '.join(parts)
<commit_msg>Make xvfb e files end with .err<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display."""
    out_file = get_tmp_filename()
    err_file = get_tmp_filename(ext='err')
    parts = [
        'xvfb-run',
        '-a',  # automatically get a free server number
        '-f {}'.format(out_file),
        '-e {}'.format(err_file),
        '--wait={:d}'.format(wait),
        '--server-args="{}"'.format(server_args),
        command,
    ]
    return ' '.join(parts)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display.

    wait: seconds xvfb-run allows the X server to start
    server_args: arguments forwarded to the Xvfb server
    """
    parts = ['xvfb-run',
             '-a',  # automatically get a free server number
             '-f {}'.format(get_tmp_filename()),
             # Error file gets a distinct .err extension instead of the
             # default .out, so it is not mistaken for the -f file above.
             '-e {}'.format(get_tmp_filename(ext='err')),
             '--wait={:d}'.format(wait),
             '--server-args="{}"'.format(server_args),
             command]
    return ' '.join(parts)
Make xvfb e files end with .err#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display."""
    out_file = get_tmp_filename()
    err_file = get_tmp_filename(ext='err')
    parts = [
        'xvfb-run',
        '-a',  # automatically get a free server number
        '-f {}'.format(out_file),
        '-e {}'.format(err_file),
        '--wait={:d}'.format(wait),
        '--server-args="{}"'.format(server_args),
        command,
    ]
    return ' '.join(parts)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display.

    wait: seconds xvfb-run allows the X server to start
    server_args: arguments forwarded to the Xvfb server
    """
    parts = ['xvfb-run',
             '-a',  # automatically get a free server number
             '-f {}'.format(get_tmp_filename()),
             # Error file gets a distinct .err extension instead of the
             # default .out, so it is not mistaken for the -f file above.
             '-e {}'.format(get_tmp_filename(ext='err')),
             '--wait={:d}'.format(wait),
             '--server-args="{}"'.format(server_args),
             command]
    return ' '.join(parts)
<commit_msg>Make xvfb e files end with .err<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" util.py
Utility functions/constants across seam
"""
__author__ = 'Scott Burns <scott.s.burns@vanderbilt.edu>'
__copyright__ = 'Copyright 2014 Vanderbilt University. All Rights Reserved'
import sys
PY2 = sys.version_info[0] == 2
if PY2:
STRING_TYPE = basestring
else:
STRING_TYPE = str
import os
from string import digits, ascii_letters
from random import choice
total = digits + ascii_letters
def get_tmp_filename(ext='out', basename='/tmp', fname_length=32):
    """Build a path under *basename* to a randomly named file with *ext*."""
    random_name = ''.join(choice(total) for _ in range(fname_length))
    filename = '{}.{}'.format(random_name, ext)
    return os.path.join(basename, filename)
def wrap_with_xvfb(command, wait=5, server_args='-screen 0, 1600x1200x24'):
    """Return *command* wrapped so it runs under a virtual X display."""
    out_file = get_tmp_filename()
    err_file = get_tmp_filename(ext='err')
    parts = [
        'xvfb-run',
        '-a',  # automatically get a free server number
        '-f {}'.format(out_file),
        '-e {}'.format(err_file),
        '--wait={:d}'.format(wait),
        '--server-args="{}"'.format(server_args),
        command,
    ]
    return ' '.join(parts)
|
d18c715f8f0d86d58fcec4df8710f0370ff87308 | channels/worker.py | channels/worker.py | from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        # callback, if given, is invoked as callback(channel, message)
        # just before each consumer runs.
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channels = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channels, block=True)
            # If no message, stall a little to avoid busy-looping then
            # continue. This check must run before touching `content`,
            # which is None in that case — the old code called
            # content.get(...) first and raised AttributeError.
            if channel is None:
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s", channel, content.get("reply_channel", "none"))
            # Create message wrapper
            message = Message(
                content=content,
                channel_name=channel,
                channel_layer=self.channel_layer,
            )
            # Handle the message
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit — consider `except Exception`.
            except:
                logger.exception("Error processing message with consumer %s:", name_that_thing(consumer))
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channel_names = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channel_names, block=True)
            if channel is None:
                # Nothing available; back off briefly to avoid busy-looping.
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s",
                         channel, content.get("reply_channel", "none"))
            # Wrap the raw content so consumers get a Message object.
            message = Message(
                channel_layer=self.channel_layer,
                channel_name=channel,
                content=content,
            )
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            except:
                logger.exception("Error processing message with consumer %s:",
                                 name_that_thing(consumer))
| Python | bsd-3-clause | Krukov/channels,Coread/channels,Krukov/channels,raiderrobert/channels,Coread/channels,andrewgodwin/django-channels,andrewgodwin/channels,linuxlewis/channels,django/channels,raphael-boucher/channels | from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        # callback, if given, is invoked as callback(channel, message)
        # just before each consumer runs.
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channels = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channels, block=True)
            # If no message, stall a little to avoid busy-looping then
            # continue. This check must run before touching `content`,
            # which is None in that case — the old code called
            # content.get(...) first and raised AttributeError.
            if channel is None:
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s", channel, content.get("reply_channel", "none"))
            # Create message wrapper
            message = Message(
                content=content,
                channel_name=channel,
                channel_layer=self.channel_layer,
            )
            # Handle the message
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit — consider `except Exception`.
            except:
                logger.exception("Error processing message with consumer %s:", name_that_thing(consumer))
Fix core logging when no message on channel | from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channel_names = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channel_names, block=True)
            if channel is None:
                # Nothing available; back off briefly to avoid busy-looping.
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s",
                         channel, content.get("reply_channel", "none"))
            # Wrap the raw content so consumers get a Message object.
            message = Message(
                channel_layer=self.channel_layer,
                channel_name=channel,
                content=content,
            )
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            except:
                logger.exception("Error processing message with consumer %s:",
                                 name_that_thing(consumer))
| <commit_before>from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        # callback, if given, is invoked as callback(channel, message)
        # just before each consumer runs.
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channels = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channels, block=True)
            # If no message, stall a little to avoid busy-looping then
            # continue. This check must run before touching `content`,
            # which is None in that case — the old code called
            # content.get(...) first and raised AttributeError.
            if channel is None:
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s", channel, content.get("reply_channel", "none"))
            # Create message wrapper
            message = Message(
                content=content,
                channel_name=channel,
                channel_layer=self.channel_layer,
            )
            # Handle the message
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit — consider `except Exception`.
            except:
                logger.exception("Error processing message with consumer %s:", name_that_thing(consumer))
<commit_msg>Fix core logging when no message on channel<commit_after> | from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channel_names = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channel_names, block=True)
            if channel is None:
                # Nothing available; back off briefly to avoid busy-looping.
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s",
                         channel, content.get("reply_channel", "none"))
            # Wrap the raw content so consumers get a Message object.
            message = Message(
                channel_layer=self.channel_layer,
                channel_name=channel,
                content=content,
            )
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            except:
                logger.exception("Error processing message with consumer %s:",
                                 name_that_thing(consumer))
| from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        # callback, if given, is invoked as callback(channel, message)
        # just before each consumer runs.
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channels = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channels, block=True)
            # If no message, stall a little to avoid busy-looping then
            # continue. This check must run before touching `content`,
            # which is None in that case — the old code called
            # content.get(...) first and raised AttributeError.
            if channel is None:
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s", channel, content.get("reply_channel", "none"))
            # Create message wrapper
            message = Message(
                content=content,
                channel_name=channel,
                channel_layer=self.channel_layer,
            )
            # Handle the message
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit — consider `except Exception`.
            except:
                logger.exception("Error processing message with consumer %s:", name_that_thing(consumer))
Fix core logging when no message on channelfrom __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channel_names = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channel_names, block=True)
            if channel is None:
                # Nothing available; back off briefly to avoid busy-looping.
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s",
                         channel, content.get("reply_channel", "none"))
            # Wrap the raw content so consumers get a Message object.
            message = Message(
                channel_layer=self.channel_layer,
                channel_name=channel,
                content=content,
            )
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            except:
                logger.exception("Error processing message with consumer %s:",
                                 name_that_thing(consumer))
| <commit_before>from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        # callback, if given, is invoked as callback(channel, message)
        # just before each consumer runs.
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channels = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channels, block=True)
            # If no message, stall a little to avoid busy-looping then
            # continue. This check must run before touching `content`,
            # which is None in that case — the old code called
            # content.get(...) first and raised AttributeError.
            if channel is None:
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s", channel, content.get("reply_channel", "none"))
            # Create message wrapper
            message = Message(
                content=content,
                channel_name=channel,
                channel_layer=self.channel_layer,
            )
            # Handle the message
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit — consider `except Exception`.
            except:
                logger.exception("Error processing message with consumer %s:", name_that_thing(consumer))
<commit_msg>Fix core logging when no message on channel<commit_after>from __future__ import unicode_literals
import logging
import time
from .message import Message
from .utils import name_that_thing
logger = logging.getLogger('django.channels')
class Worker(object):
    """
    A "worker" process that continually looks for available messages to run
    and runs their consumers.
    """

    def __init__(self, channel_layer, callback=None):
        self.channel_layer = channel_layer
        self.callback = callback

    def run(self):
        """
        Tries to continually dispatch messages to consumers.
        """
        channel_names = self.channel_layer.registry.all_channel_names()
        while True:
            channel, content = self.channel_layer.receive_many(channel_names, block=True)
            if channel is None:
                # Nothing available; back off briefly to avoid busy-looping.
                time.sleep(0.01)
                continue
            logger.debug("Worker got message on %s: repl %s",
                         channel, content.get("reply_channel", "none"))
            # Wrap the raw content so consumers get a Message object.
            message = Message(
                channel_layer=self.channel_layer,
                channel_name=channel,
                content=content,
            )
            consumer = self.channel_layer.registry.consumer_for_channel(channel)
            if self.callback:
                self.callback(channel, message)
            try:
                consumer(message)
            except:
                logger.exception("Error processing message with consumer %s:",
                                 name_that_thing(consumer))
|
da3c8b01512466e8b0c71a239378b8643d5384ef | tests/state_tests.py | tests/state_tests.py | """
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
    """Smoke-test SQLiteState: create a fresh db, apply schema, seed rows."""
    # TODO: change to /tmp when install SQLiteStudio in ubuntu
    PATH = "temp/state_test_db.sqlite"
    # sqlite use text instead of VARCHAR
    SCHEMA = """
    CREATE TABLE state_test (
        name TEXT NOT NULL,
        datatype TEXT NOT NULL,
        value TEXT,
        pid INTEGER NOT NULL,
        PRIMARY KEY (name, pid)
    );
    """
    SCHEMA_INIT = """
    INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
    INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
    INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
    INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
    INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
    """
    path, extension = splitext(PATH)
    sqlite_state = SQLiteState(path, extension)
    # Remove any db left over from a previous run; on a fresh checkout the
    # file may not exist, so guard the removal instead of letting
    # FileNotFoundError abort the test.
    # NOTE(review): assumes SQLiteState() does not require PATH to exist.
    if os.path.exists(PATH):
        os.remove(PATH)
    sqlite_state._create(PATH, SCHEMA)
    sqlite_state._init(PATH, SCHEMA_INIT)
    # sqlite_state._delete()
| """
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
    """Smoke-test SQLiteState: create a fresh db, apply schema, seed rows."""
    # TODO: change to /tmp when install SQLiteStudio in ubuntu
    PATH = "temp/state_test_db.sqlite"
    # sqlite use text instead of VARCHAR
    SCHEMA = """
    CREATE TABLE state_test (
        name TEXT NOT NULL,
        datatype TEXT NOT NULL,
        value TEXT,
        pid INTEGER NOT NULL,
        PRIMARY KEY (name, pid)
    );
    """
    SCHEMA_INIT = """
    INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
    INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
    INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 1);
    INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
    INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
    INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
    """
    path, extension = splitext(PATH)
    sqlite_state = SQLiteState(path, extension)
    # Remove any db left over from a previous run; on a fresh checkout the
    # file may not exist, so guard the removal instead of letting
    # FileNotFoundError abort the test.
    # NOTE(review): assumes SQLiteState() does not require PATH to exist.
    if os.path.exists(PATH):
        os.remove(PATH)
    sqlite_state._create(PATH, SCHEMA)
    sqlite_state._init(PATH, SCHEMA_INIT)
    # sqlite_state._delete()
| Use same tagname but different scope | Use same tagname but different scope
| Python | mit | remmihsorp/minicps,scy-phy/minicps,remmihsorp/minicps,scy-phy/minicps | """
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
    """Smoke-test SQLiteState: create a fresh db, apply schema, seed rows."""
    # TODO: change to /tmp when install SQLiteStudio in ubuntu
    PATH = "temp/state_test_db.sqlite"
    # sqlite use text instead of VARCHAR
    SCHEMA = """
    CREATE TABLE state_test (
        name TEXT NOT NULL,
        datatype TEXT NOT NULL,
        value TEXT,
        pid INTEGER NOT NULL,
        PRIMARY KEY (name, pid)
    );
    """
    SCHEMA_INIT = """
    INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
    INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
    INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
    INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
    INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
    """
    path, extension = splitext(PATH)
    sqlite_state = SQLiteState(path, extension)
    # Remove any db left over from a previous run; on a fresh checkout the
    # file may not exist, so guard the removal instead of letting
    # FileNotFoundError abort the test.
    # NOTE(review): assumes SQLiteState() does not require PATH to exist.
    if os.path.exists(PATH):
        os.remove(PATH)
    sqlite_state._create(PATH, SCHEMA)
    sqlite_state._init(PATH, SCHEMA_INIT)
    # sqlite_state._delete()
Use same tagname but different scope | """
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
    """Smoke-test SQLiteState: create a fresh db, apply schema, seed rows."""
    # TODO: change to /tmp when install SQLiteStudio in ubuntu
    PATH = "temp/state_test_db.sqlite"
    # sqlite use text instead of VARCHAR
    SCHEMA = """
    CREATE TABLE state_test (
        name TEXT NOT NULL,
        datatype TEXT NOT NULL,
        value TEXT,
        pid INTEGER NOT NULL,
        PRIMARY KEY (name, pid)
    );
    """
    SCHEMA_INIT = """
    INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
    INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
    INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 1);
    INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
    INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
    INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
    """
    path, extension = splitext(PATH)
    sqlite_state = SQLiteState(path, extension)
    # Remove any db left over from a previous run; on a fresh checkout the
    # file may not exist, so guard the removal instead of letting
    # FileNotFoundError abort the test.
    # NOTE(review): assumes SQLiteState() does not require PATH to exist.
    if os.path.exists(PATH):
        os.remove(PATH)
    sqlite_state._create(PATH, SCHEMA)
    sqlite_state._init(PATH, SCHEMA_INIT)
    # sqlite_state._delete()
| <commit_before>"""
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
# TODO: change to /tmp when install SQLitesutdio in ubuntu
PATH = "temp/state_test_db.sqlite"
# sqlite use text instead of VARCHAR
SCHEMA = """
CREATE TABLE state_test (
name TEXT NOT NULL,
datatype TEXT NOT NULL,
value TEXT,
pid INTEGER NOT NULL,
PRIMARY KEY (name, pid)
);
"""
SCHEMA_INIT = """
INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
"""
path, extension = splitext(PATH)
sqlite_state = SQLiteState(path, extension)
os.remove(PATH)
sqlite_state._create(PATH, SCHEMA)
sqlite_state._init(PATH, SCHEMA_INIT)
# sqlite_state._delete()
<commit_msg>Use same tagname but different scope<commit_after> | """
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
# TODO: change to /tmp when install SQLitesutdio in ubuntu
PATH = "temp/state_test_db.sqlite"
# sqlite use text instead of VARCHAR
SCHEMA = """
CREATE TABLE state_test (
name TEXT NOT NULL,
datatype TEXT NOT NULL,
value TEXT,
pid INTEGER NOT NULL,
PRIMARY KEY (name, pid)
);
"""
SCHEMA_INIT = """
INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
"""
path, extension = splitext(PATH)
sqlite_state = SQLiteState(path, extension)
os.remove(PATH)
sqlite_state._create(PATH, SCHEMA)
sqlite_state._init(PATH, SCHEMA_INIT)
# sqlite_state._delete()
| """
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
# TODO: change to /tmp when install SQLitesutdio in ubuntu
PATH = "temp/state_test_db.sqlite"
# sqlite use text instead of VARCHAR
SCHEMA = """
CREATE TABLE state_test (
name TEXT NOT NULL,
datatype TEXT NOT NULL,
value TEXT,
pid INTEGER NOT NULL,
PRIMARY KEY (name, pid)
);
"""
SCHEMA_INIT = """
INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
"""
path, extension = splitext(PATH)
sqlite_state = SQLiteState(path, extension)
os.remove(PATH)
sqlite_state._create(PATH, SCHEMA)
sqlite_state._init(PATH, SCHEMA_INIT)
# sqlite_state._delete()
Use same tagname but different scope"""
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
# TODO: change to /tmp when install SQLitesutdio in ubuntu
PATH = "temp/state_test_db.sqlite"
# sqlite use text instead of VARCHAR
SCHEMA = """
CREATE TABLE state_test (
name TEXT NOT NULL,
datatype TEXT NOT NULL,
value TEXT,
pid INTEGER NOT NULL,
PRIMARY KEY (name, pid)
);
"""
SCHEMA_INIT = """
INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
"""
path, extension = splitext(PATH)
sqlite_state = SQLiteState(path, extension)
os.remove(PATH)
sqlite_state._create(PATH, SCHEMA)
sqlite_state._init(PATH, SCHEMA_INIT)
# sqlite_state._delete()
| <commit_before>"""
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
# TODO: change to /tmp when install SQLitesutdio in ubuntu
PATH = "temp/state_test_db.sqlite"
# sqlite use text instead of VARCHAR
SCHEMA = """
CREATE TABLE state_test (
name TEXT NOT NULL,
datatype TEXT NOT NULL,
value TEXT,
pid INTEGER NOT NULL,
PRIMARY KEY (name, pid)
);
"""
SCHEMA_INIT = """
INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
"""
path, extension = splitext(PATH)
sqlite_state = SQLiteState(path, extension)
os.remove(PATH)
sqlite_state._create(PATH, SCHEMA)
sqlite_state._init(PATH, SCHEMA_INIT)
# sqlite_state._delete()
<commit_msg>Use same tagname but different scope<commit_after>"""
state_tests.py
"""
import os
from os.path import splitext
from minicps.state import SQLiteState
def test_SQLiteState():
# TODO: change to /tmp when install SQLitesutdio in ubuntu
PATH = "temp/state_test_db.sqlite"
# sqlite use text instead of VARCHAR
SCHEMA = """
CREATE TABLE state_test (
name TEXT NOT NULL,
datatype TEXT NOT NULL,
value TEXT,
pid INTEGER NOT NULL,
PRIMARY KEY (name, pid)
);
"""
SCHEMA_INIT = """
INSERT INTO state_test VALUES ('SENSOR1', 'int', '0', 1);
INSERT INTO state_test VALUES ('SENSOR2', 'float', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 1);
INSERT INTO state_test VALUES ('SENSOR3', 'int', '0.0', 2);
INSERT INTO state_test VALUES ('ACTUATOR1', 'int', '1', 1);
INSERT INTO state_test VALUES ('ACTUATOR2', 'int', '0', 1);
"""
path, extension = splitext(PATH)
sqlite_state = SQLiteState(path, extension)
os.remove(PATH)
sqlite_state._create(PATH, SCHEMA)
sqlite_state._init(PATH, SCHEMA_INIT)
# sqlite_state._delete()
|
c820e3ed4d78b975a6bdff54a2ecae26354ae10e | tests/test_itunes.py | tests/test_itunes.py | """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
| """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
| Add `parse_value` test for AppleScript dates | Add `parse_value` test for AppleScript dates
Added a test case to `parse_value` to parse dates returned in
AppleScript responses.
| Python | mit | adanoff/iTunesTUI | """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
Add `parse_value` test for AppleScript dates
Added a test case to `parse_value` to parse dates returned in
AppleScript responses. | """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
| <commit_before>"""
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
<commit_msg>Add `parse_value` test for AppleScript dates
Added a test case to `parse_value` to parse dates returned in
AppleScript responses.<commit_after> | """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
| """
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
Add `parse_value` test for AppleScript dates
Added a test case to `parse_value` to parse dates returned in
AppleScript responses."""
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
| <commit_before>"""
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
<commit_msg>Add `parse_value` test for AppleScript dates
Added a test case to `parse_value` to parse dates returned in
AppleScript responses.<commit_after>"""
test_itunes.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
|
d69b137bd19e0363173b120ff4f68becc6be7b3c | mama_cas/tests/backends.py | mama_cas/tests/backends.py | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username__iexact=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
| from django.contrib.auth.backends import ModelBackend
from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
user_model = get_user_model()
try:
user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
| Use get_user_model within test backend | Use get_user_model within test backend
| Python | bsd-3-clause | orbitvu/django-mama-cas,harlov/django-mama-cas,forcityplatform/django-mama-cas,jbittel/django-mama-cas,orbitvu/django-mama-cas,forcityplatform/django-mama-cas,jbittel/django-mama-cas,harlov/django-mama-cas | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username__iexact=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
Use get_user_model within test backend | from django.contrib.auth.backends import ModelBackend
from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
user_model = get_user_model()
try:
user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
| <commit_before>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username__iexact=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
<commit_msg>Use get_user_model within test backend<commit_after> | from django.contrib.auth.backends import ModelBackend
from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
user_model = get_user_model()
try:
user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
| from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username__iexact=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
Use get_user_model within test backendfrom django.contrib.auth.backends import ModelBackend
from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
user_model = get_user_model()
try:
user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
| <commit_before>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username__iexact=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
<commit_msg>Use get_user_model within test backend<commit_after>from django.contrib.auth.backends import ModelBackend
from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
user_model = get_user_model()
try:
user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
|
46077269450f98505308736251b3f08ed3c6827f | scripts/poweron/DRAC.py | scripts/poweron/DRAC.py | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/usr/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | Change path to the supplemental pack | CA-40618: Change path to the supplemental pack
Signed-off-by: Javier Alvarez-Valle <cf4c8668a0b4c5e013f594a6940d05b3d4d9ddcf@citrix.com>
| Python | lgpl-2.1 | Frezzle/xen-api,vasilenkomike/xen-api,simonjbeaumont/xen-api,euanh/xen-api,cheng-z/xen-api,jjd27/xen-api,vasilenkomike/xen-api,rafalmiel/xen-api,robertbreker/xen-api,koushikcgit/xen-api,huizh/xen-api,agimofcarmen/xen-api,cheng--zhang/xen-api,salvocambria/xen-api,jjd27/xen-api,djs55/xen-api,thomassa/xen-api,huizh/xen-api,rafalmiel/xen-api,salvocambria/xen-api,robertbreker/xen-api,cheng--zhang/xen-api,thomassa/xen-api,srowe/xen-api,koushikcgit/xen-api,koushikcgit/xen-api,robertbreker/xen-api,robertbreker/xen-api,simonjbeaumont/xen-api,anoobs/xen-api,srowe/xen-api,cheng-z/xen-api,jjd27/xen-api,huizh/xen-api,guard163/xen-api,guard163/xen-api,salvocambria/xen-api,jjd27/xen-api,rafalmiel/xen-api,djs55/xen-api,Frezzle/xen-api,jjd27/xen-api,srowe/xen-api,anoobs/xen-api,huizh/xen-api,agimofcarmen/xen-api,robertbreker/xen-api,cheng-z/xen-api,jjd27/xen-api,guard163/xen-api,cheng-z/xen-api,guard163/xen-api,cheng-z/xen-api,simonjbeaumont/xen-api,anoobs/xen-api,djs55/xen-api,huizh/xen-api,thomassa/xen-api,Frezzle/xen-api,djs55/xen-api,simonjbeaumont/xen-api,cheng-z/xen-api,djs55/xen-api,rafalmiel/xen-api,robertbreker/xen-api,euanh/xen-api,cheng--zhang/xen-api,agimofcarmen/xen-api,rafalmiel/xen-api,ravippandey/xen-api,guard163/xen-api,salvocambria/xen-api,cheng--zhang/xen-api,cheng-z/xen-api,euanh/xen-api,cheng--zhang/xen-api,ravippandey/xen-api,vasilenkomike/xen-api,vasilenkomike/xen-api,koushikcgit/xen-api,thomassa/xen-api,ravippandey/xen-api,euanh/xen-api,euanh/xen-api,thomassa/xen-api,srowe/xen-api,ravippandey/xen-api,thomassa/xen-api,djs55/xen-api,Frezzle/xen-api,ravippandey/xen-api,cheng--zhang/xen-api,Frezzle/xen-api,euanh/xen-api,guard163/xen-api,cheng--zhang/xen-api,koushikcgit/xen-api,salvocambria/xen-api,koushikcgit/xen-api,anoobs/xen-api,agimofcarmen/xen-api,srowe/xen-api,agimofcarmen/xen-api,vasilenkomike/xen-api,agimofcarmen/xen-api,simonjbeaumont/xen-api,rafalmiel/xen-api,ravippandey/xen-api,simonjbeaumont/xen-api,salvocambria/xen-api,anoobs/xe
n-api,vasilenkomike/xen-api,huizh/xen-api,anoobs/xen-api | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/usr/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main()CA-40618: Change path to the supplemental pack
Signed-off-by: Javier Alvarez-Valle <cf4c8668a0b4c5e013f594a6940d05b3d4d9ddcf@citrix.com> | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | <commit_before>import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/usr/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main()<commit_msg>CA-40618: Change path to the supplemental pack
Signed-off-by: Javier Alvarez-Valle <cf4c8668a0b4c5e013f594a6940d05b3d4d9ddcf@citrix.com><commit_after> | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/usr/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main()CA-40618: Change path to the supplemental pack
Signed-off-by: Javier Alvarez-Valle <cf4c8668a0b4c5e013f594a6940d05b3d4d9ddcf@citrix.com>import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | <commit_before>import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/usr/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main()<commit_msg>CA-40618: Change path to the supplemental pack
Signed-off-by: Javier Alvarez-Valle <cf4c8668a0b4c5e013f594a6940d05b3d4d9ddcf@citrix.com><commit_after>import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() |
1fe22f9750c618ede99f9b0a0d088aa67b7929a1 | stock_available_unreserved/models/quant.py | stock_available_unreserved/models/quant.py | # Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
| # Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
# Avoid error when adding a new line on manually Update Quantity
if isinstance(record.id, models.NewId):
record.contains_unreserved = False
continue
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
| Fix compute contains_unreserved on NewId records | [FIX] Fix compute contains_unreserved on NewId records
| Python | agpl-3.0 | OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse | # Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
[FIX] Fix compute contains_unreserved on NewId records | # Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
# Avoid error when adding a new line on manually Update Quantity
if isinstance(record.id, models.NewId):
record.contains_unreserved = False
continue
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
| <commit_before># Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
<commit_msg>[FIX] Fix compute contains_unreserved on NewId records<commit_after> | # Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
# Avoid error when adding a new line on manually Update Quantity
if isinstance(record.id, models.NewId):
record.contains_unreserved = False
continue
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
| # Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
[FIX] Fix compute contains_unreserved on NewId records# Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
# Avoid error when adding a new line on manually Update Quantity
if isinstance(record.id, models.NewId):
record.contains_unreserved = False
continue
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
| <commit_before># Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
<commit_msg>[FIX] Fix compute contains_unreserved on NewId records<commit_after># Copyright 2018 Camptocamp SA
# Copyright 2016-19 ForgeFlow S.L. (https://www.forgeflow.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockQuant(models.Model):
_inherit = "stock.quant"
contains_unreserved = fields.Boolean(
string="Contains unreserved products",
compute="_compute_contains_unreserved",
store=True,
)
@api.depends("product_id", "location_id", "quantity", "reserved_quantity")
def _compute_contains_unreserved(self):
for record in self:
# Avoid error when adding a new line on manually Update Quantity
if isinstance(record.id, models.NewId):
record.contains_unreserved = False
continue
available = record._get_available_quantity(
record.product_id, record.location_id
)
record.contains_unreserved = True if available > 0 else False
|
828b78767c17419513337ca29b5c2dab08995714 | ctypeslib/test/test_dynmodule.py | ctypeslib/test/test_dynmodule.py | # Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def tearDown(self):
for fnm in glob.glob(stdio._gen_basename + ".*"):
try:
os.remove(fnm)
except IOError:
pass
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
| # Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
| Remove now useless TearDown method. | Remove now useless TearDown method.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@53797 6015fed2-1504-0410-9fe1-9d1591cc4771
| Python | mit | trolldbois/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib,luzfcb/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib | # Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def tearDown(self):
for fnm in glob.glob(stdio._gen_basename + ".*"):
try:
os.remove(fnm)
except IOError:
pass
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
Remove now useless TearDown method.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@53797 6015fed2-1504-0410-9fe1-9d1591cc4771 | # Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
| <commit_before># Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def tearDown(self):
for fnm in glob.glob(stdio._gen_basename + ".*"):
try:
os.remove(fnm)
except IOError:
pass
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
<commit_msg>Remove now useless TearDown method.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@53797 6015fed2-1504-0410-9fe1-9d1591cc4771<commit_after> | # Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
| # Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def tearDown(self):
for fnm in glob.glob(stdio._gen_basename + ".*"):
try:
os.remove(fnm)
except IOError:
pass
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
Remove now useless TearDown method.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@53797 6015fed2-1504-0410-9fe1-9d1591cc4771# Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
| <commit_before># Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def tearDown(self):
for fnm in glob.glob(stdio._gen_basename + ".*"):
try:
os.remove(fnm)
except IOError:
pass
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
<commit_msg>Remove now useless TearDown method.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@53797 6015fed2-1504-0410-9fe1-9d1591cc4771<commit_after># Basic test of dynamic code generation
import unittest
import os, glob
import stdio
from ctypes import POINTER, c_int
class DynModTest(unittest.TestCase):
def test_fopen(self):
self.failUnlessEqual(stdio.fopen.restype, POINTER(stdio.FILE))
self.failUnlessEqual(stdio.fopen.argtypes, [stdio.STRING, stdio.STRING])
def test_constants(self):
self.failUnlessEqual(stdio.O_RDONLY, 0)
self.failUnlessEqual(stdio.O_WRONLY, 1)
self.failUnlessEqual(stdio.O_RDWR, 2)
def test_compiler_errors(self):
from ctypeslib.codegen.cparser import CompilerError
from ctypeslib.dynamic_module import include
self.failUnlessRaises(CompilerError, lambda: include("#error"))
if __name__ == "__main__":
unittest.main()
|
ddb79d01e7ae0c840a3f3181a600aae34613c4e5 | login_token/models.py | login_token/models.py | import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
| import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
def __repr__(self):
repr_format = '<LoginToken: "%s" user="%s" instance="%s">'
return repr_format % (self.token,
self.user.username,
self.instance.label)
| Add a __repr__ method for LoginToken | Add a __repr__ method for LoginToken
| Python | agpl-3.0 | opencorato/sayit,opencorato/sayit,opencorato/sayit,opencorato/sayit | import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
Add a __repr__ method for LoginToken | import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
def __repr__(self):
repr_format = '<LoginToken: "%s" user="%s" instance="%s">'
return repr_format % (self.token,
self.user.username,
self.instance.label)
| <commit_before>import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
<commit_msg>Add a __repr__ method for LoginToken<commit_after> | import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
def __repr__(self):
repr_format = '<LoginToken: "%s" user="%s" instance="%s">'
return repr_format % (self.token,
self.user.username,
self.instance.label)
| import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
Add a __repr__ method for LoginTokenimport random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
def __repr__(self):
repr_format = '<LoginToken: "%s" user="%s" instance="%s">'
return repr_format % (self.token,
self.user.username,
self.instance.label)
| <commit_before>import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
<commit_msg>Add a __repr__ method for LoginToken<commit_after>import random
import re
from django.contrib.auth.models import User
from django.db import models
from instances.models import InstanceMixin
NUMBER_OF_TOKEN_WORDS = 3
def generate_token():
def useful_word(w):
# FIXME: should try to exclude offensive words
if len(w) < 4:
return False
if re.search('^[a-z]*$', w):
return True
words = []
with open('/usr/share/dict/words') as fp:
for line in fp:
word = line.strip()
if useful_word(word):
words.append(word)
return " ".join(random.choice(words)
for i in range(NUMBER_OF_TOKEN_WORDS))
class LoginToken(InstanceMixin, models.Model):
'''Represents a readable login token for mobile devices
To enable logging in to a SayIt instance as a particular user, we
ask the user to type in a three word phrase; this model records
tokens that allow login for a particular instance by a particular
user.'''
user = models.ForeignKey(User)
token = models.TextField(max_length=255,
default=generate_token)
def regenerate_token(self):
token = generate_token()
token.save()
def __repr__(self):
repr_format = '<LoginToken: "%s" user="%s" instance="%s">'
return repr_format % (self.token,
self.user.username,
self.instance.label)
|
23e3197f15d13445defe6ec7cfb4f08484089068 | tests/test_scripts/test_simulate_data.py | tests/test_scripts/test_simulate_data.py | import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data | import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,
'--nstep','5'
])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data | Use few nsteps for testing sim-script | Use few nsteps for testing sim-script
| Python | apache-2.0 | SKA-ScienceDataProcessor/FastImaging-Python,SKA-ScienceDataProcessor/FastImaging-Python | import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_dataUse few nsteps for testing sim-script | import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,
'--nstep','5'
])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data | <commit_before>import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data<commit_msg>Use few nsteps for testing sim-script<commit_after> | import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,
'--nstep','5'
])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data | import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_dataUse few nsteps for testing sim-scriptimport json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,
'--nstep','5'
])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data | <commit_before>import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data<commit_msg>Use few nsteps for testing sim-script<commit_after>import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,
'--nstep','5'
])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data |
5972644fe7d0267849440d8e60509baba6e013a3 | test/test_exception.py | test/test_exception.py | from mock import MagicMock
import pyaem
import unittest
class TestPyAemException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main() | import pyaem
import unittest
class TestException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main() | Rename class name to be consistent with file name. Remove unused import. | Rename class name to be consistent with file name. Remove unused import.
| Python | mit | Sensis/pyaem,wildone/pyaem | from mock import MagicMock
import pyaem
import unittest
class TestPyAemException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main()Rename class name to be consistent with file name. Remove unused import. | import pyaem
import unittest
class TestException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main() | <commit_before>from mock import MagicMock
import pyaem
import unittest
class TestPyAemException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main()<commit_msg>Rename class name to be consistent with file name. Remove unused import.<commit_after> | import pyaem
import unittest
class TestException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main() | from mock import MagicMock
import pyaem
import unittest
class TestPyAemException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main()Rename class name to be consistent with file name. Remove unused import.import pyaem
import unittest
class TestException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main() | <commit_before>from mock import MagicMock
import pyaem
import unittest
class TestPyAemException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main()<commit_msg>Rename class name to be consistent with file name. Remove unused import.<commit_after>import pyaem
import unittest
class TestException(unittest.TestCase):
def test_init(self):
exception = pyaem.PyAemException(123, 'somemessage')
self.assertEqual(exception.code, 123)
self.assertEqual(exception.message, 'somemessage')
if __name__ == '__main__':
unittest.main() |
dfea77df6e6ba27bada1c80da6efab392507736b | forklift/services/satellite.py | forklift/services/satellite.py | #
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
daemon=True,
)
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
| #
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
)
payload.daemon = True
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
| Fix making threads daemonic on Python 3.2 | Fix making threads daemonic on Python 3.2
| Python | apache-2.0 | infoxchange/docker-forklift,infoxchange/docker-forklift | #
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
daemon=True,
)
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
Fix making threads daemonic on Python 3.2 | #
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
)
payload.daemon = True
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
| <commit_before>#
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
daemon=True,
)
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
<commit_msg>Fix making threads daemonic on Python 3.2<commit_after> | #
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
)
payload.daemon = True
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
| #
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
daemon=True,
)
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
Fix making threads daemonic on Python 3.2#
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
)
payload.daemon = True
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
| <commit_before>#
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
daemon=True,
)
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
<commit_msg>Fix making threads daemonic on Python 3.2<commit_after>#
# Copyright 2014 Infoxchange Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Satellite processes started by Forklift itself to provide services.
"""
import os
import threading
from time import sleep
def start_satellite(target, args=(), kwargs=None, stop=None):
"""
Start a process configured to run the target but kill it after the parent
exits.
"""
if kwargs is None:
kwargs = {}
pid = os.fork()
if pid == 0:
# Run target daemonized.
payload = threading.Thread(
target=target,
args=args,
kwargs=kwargs,
)
payload.daemon = True
payload.start()
# Cannot wait for the process that's not our child
ppid = os.getppid()
try:
while True:
os.kill(ppid, 0)
sleep(1)
except OSError:
if stop:
stop()
os._exit(os.EX_OK) # pylint:disable=protected-access
|
d41af20b1bdf5b630962a2e474b5d9c7ed62cd5c | nuxeo-drive-client/nxdrive/gui/resources.py | nuxeo-drive-client/nxdrive/gui/resources.py | """Helper to lookup UI resources from package"""
import re
import os
from nxdrive.logging_config import get_logger
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon on various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
cxfreeze_suffix = os.path.join('library.zip', 'nxdrive')
app_resources = '/Contents/Resources/'
if app_resources in nxdrive_path:
# OSX frozen distribution, bundled as an app
icons_path = re.sub(app_resources + ".*", app_resources + 'icons',
nxdrive_path)
elif nxdrive_path.endswith(cxfreeze_suffix):
# Frozen distribution of nxdrive, data is out of the zip
icons_path = nxdrive_path.replace(cxfreeze_suffix, 'icons')
if not os.path.exists(icons_path):
log.warning("Could not find the icons folder at: %s", icons_path)
return None
icon_filepath = os.path.join(icons_path, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
| """Helper to lookup UI resources from package"""
import os
from nxdrive.logging_config import get_logger
from nxdrive.utils import find_resource_dir
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon in various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
icons_dir = find_resource_dir('icons', icons_path)
if icons_dir is None:
log.warning("Could not find icon file %s as icons directory"
" could not be found",
icon_filename)
return None
icon_filepath = os.path.join(icons_dir, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
| Use generic resource directory finder for icon files | NXP-12694: Use generic resource directory finder for icon files
| Python | lgpl-2.1 | arameshkumar/base-nuxeo-drive,IsaacYangSLA/nuxeo-drive,rsoumyassdi/nuxeo-drive,arameshkumar/nuxeo-drive,ssdi-drive/nuxeo-drive,DirkHoffmann/nuxeo-drive,arameshkumar/nuxeo-drive,arameshkumar/base-nuxeo-drive,DirkHoffmann/nuxeo-drive,loopingz/nuxeo-drive,rsoumyassdi/nuxeo-drive,DirkHoffmann/nuxeo-drive,arameshkumar/nuxeo-drive,loopingz/nuxeo-drive,loopingz/nuxeo-drive,ssdi-drive/nuxeo-drive,DirkHoffmann/nuxeo-drive,loopingz/nuxeo-drive,loopingz/nuxeo-drive,arameshkumar/base-nuxeo-drive,rsoumyassdi/nuxeo-drive,IsaacYangSLA/nuxeo-drive,IsaacYangSLA/nuxeo-drive,DirkHoffmann/nuxeo-drive,arameshkumar/base-nuxeo-drive,IsaacYangSLA/nuxeo-drive,rsoumyassdi/nuxeo-drive,ssdi-drive/nuxeo-drive,arameshkumar/nuxeo-drive,IsaacYangSLA/nuxeo-drive | """Helper to lookup UI resources from package"""
import re
import os
from nxdrive.logging_config import get_logger
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon on various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
cxfreeze_suffix = os.path.join('library.zip', 'nxdrive')
app_resources = '/Contents/Resources/'
if app_resources in nxdrive_path:
# OSX frozen distribution, bundled as an app
icons_path = re.sub(app_resources + ".*", app_resources + 'icons',
nxdrive_path)
elif nxdrive_path.endswith(cxfreeze_suffix):
# Frozen distribution of nxdrive, data is out of the zip
icons_path = nxdrive_path.replace(cxfreeze_suffix, 'icons')
if not os.path.exists(icons_path):
log.warning("Could not find the icons folder at: %s", icons_path)
return None
icon_filepath = os.path.join(icons_path, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
NXP-12694: Use generic resource directory finder for icon files | """Helper to lookup UI resources from package"""
import os
from nxdrive.logging_config import get_logger
from nxdrive.utils import find_resource_dir
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon in various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
icons_dir = find_resource_dir('icons', icons_path)
if icons_dir is None:
log.warning("Could not find icon file %s as icons directory"
" could not be found",
icon_filename)
return None
icon_filepath = os.path.join(icons_dir, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
| <commit_before>"""Helper to lookup UI resources from package"""
import re
import os
from nxdrive.logging_config import get_logger
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon on various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
cxfreeze_suffix = os.path.join('library.zip', 'nxdrive')
app_resources = '/Contents/Resources/'
if app_resources in nxdrive_path:
# OSX frozen distribution, bundled as an app
icons_path = re.sub(app_resources + ".*", app_resources + 'icons',
nxdrive_path)
elif nxdrive_path.endswith(cxfreeze_suffix):
# Frozen distribution of nxdrive, data is out of the zip
icons_path = nxdrive_path.replace(cxfreeze_suffix, 'icons')
if not os.path.exists(icons_path):
log.warning("Could not find the icons folder at: %s", icons_path)
return None
icon_filepath = os.path.join(icons_path, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
<commit_msg>NXP-12694: Use generic resource directory finder for icon files<commit_after> | """Helper to lookup UI resources from package"""
import os
from nxdrive.logging_config import get_logger
from nxdrive.utils import find_resource_dir
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon in various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
icons_dir = find_resource_dir('icons', icons_path)
if icons_dir is None:
log.warning("Could not find icon file %s as icons directory"
" could not be found",
icon_filename)
return None
icon_filepath = os.path.join(icons_dir, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
| """Helper to lookup UI resources from package"""
import re
import os
from nxdrive.logging_config import get_logger
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon on various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
cxfreeze_suffix = os.path.join('library.zip', 'nxdrive')
app_resources = '/Contents/Resources/'
if app_resources in nxdrive_path:
# OSX frozen distribution, bundled as an app
icons_path = re.sub(app_resources + ".*", app_resources + 'icons',
nxdrive_path)
elif nxdrive_path.endswith(cxfreeze_suffix):
# Frozen distribution of nxdrive, data is out of the zip
icons_path = nxdrive_path.replace(cxfreeze_suffix, 'icons')
if not os.path.exists(icons_path):
log.warning("Could not find the icons folder at: %s", icons_path)
return None
icon_filepath = os.path.join(icons_path, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
NXP-12694: Use generic resource directory finder for icon files"""Helper to lookup UI resources from package"""
import os
from nxdrive.logging_config import get_logger
from nxdrive.utils import find_resource_dir
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon in various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
icons_dir = find_resource_dir('icons', icons_path)
if icons_dir is None:
log.warning("Could not find icon file %s as icons directory"
" could not be found",
icon_filename)
return None
icon_filepath = os.path.join(icons_dir, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
| <commit_before>"""Helper to lookup UI resources from package"""
import re
import os
from nxdrive.logging_config import get_logger
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon on various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
cxfreeze_suffix = os.path.join('library.zip', 'nxdrive')
app_resources = '/Contents/Resources/'
if app_resources in nxdrive_path:
# OSX frozen distribution, bundled as an app
icons_path = re.sub(app_resources + ".*", app_resources + 'icons',
nxdrive_path)
elif nxdrive_path.endswith(cxfreeze_suffix):
# Frozen distribution of nxdrive, data is out of the zip
icons_path = nxdrive_path.replace(cxfreeze_suffix, 'icons')
if not os.path.exists(icons_path):
log.warning("Could not find the icons folder at: %s", icons_path)
return None
icon_filepath = os.path.join(icons_path, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
<commit_msg>NXP-12694: Use generic resource directory finder for icon files<commit_after>"""Helper to lookup UI resources from package"""
import os
from nxdrive.logging_config import get_logger
from nxdrive.utils import find_resource_dir
log = get_logger(__name__)
def find_icon(icon_filename):
"""Find the FS path of an icon in various OS binary packages"""
import nxdrive
nxdrive_path = os.path.dirname(nxdrive.__file__)
icons_path = os.path.join(nxdrive_path, 'data', 'icons')
icons_dir = find_resource_dir('icons', icons_path)
if icons_dir is None:
log.warning("Could not find icon file %s as icons directory"
" could not be found",
icon_filename)
return None
icon_filepath = os.path.join(icons_dir, icon_filename)
if not os.path.exists(icon_filepath):
log.warning("Could not find icon file: %s", icon_filepath)
return None
return icon_filepath
|
72dea9616a84cefd8424f965060552c84cfd241d | tests/test_luabject.py | tests/test_luabject.py | try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after loading an incorrect script raises an exception.
_luabject.load_script(state, "function foo() prant() end")
| try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
# Loading a syntactically correct script that causes an error raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "hi()")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
| Test unrunnable script exceptions too | Test unrunnable script exceptions too
| Python | mit | markpasc/luabject,markpasc/luabject | try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after loading an incorrect script raises an exception.
_luabject.load_script(state, "function foo() prant() end")
Test unrunnable script exceptions too | try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
# Loading a syntactically correct script that causes an error raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "hi()")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
| <commit_before>try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after loading an incorrect script raises an exception.
_luabject.load_script(state, "function foo() prant() end")
<commit_msg>Test unrunnable script exceptions too<commit_after> | try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
# Loading a syntactically correct script that causes an error raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "hi()")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
| try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after loading an incorrect script raises an exception.
_luabject.load_script(state, "function foo() prant() end")
Test unrunnable script exceptions tootry:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
# Loading a syntactically correct script that causes an error raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "hi()")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
| <commit_before>try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after loading an incorrect script raises an exception.
_luabject.load_script(state, "function foo() prant() end")
<commit_msg>Test unrunnable script exceptions too<commit_after>try:
import unittest2 as unittest
except ImportError:
import unittest
from village import _luabject
class TestDirect(unittest.TestCase):
def test_new(self):
state = _luabject.new()
# PyCObject isn't available to assertIsInstance, so:
self.assertEqual(type(state).__name__, 'PyCObject')
def test_load_script(self):
state = _luabject.new()
_luabject.load_script(state, "")
# Can load multiple scripts in one state.
_luabject.load_script(state, "")
# Can load a syntactically correct script.
state = _luabject.new()
_luabject.load_script(state, "function foo() prant() end")
# Can load multiple syntactically correct scripts in one state.
_luabject.load_script(state, "function bar() prant() end")
# Loading a syntactically incorrect script raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "1+1")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
# Loading a syntactically correct script that causes an error raises an exception.
state = _luabject.new()
with self.assertRaises(ValueError):
_luabject.load_script(state, "hi()")
# Can load a syntactically correct script even after a load_script() exception.
_luabject.load_script(state, "function foo() prant() end")
|
9f20f232a9507f0002adc682a87bb792f6fbdd4e | django_plim/template.py | django_plim/template.py | #!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
| #!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
from django.template.loaders import app_directories
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
class Template(MakoTemplate):
def render(self, context):
context_dict = {}
for d in context.dicts:
context_dict.update(d)
return super(Template, self).render(context_dict)
class Loader(app_directories.Loader):
is_usable = True
def load_template(self, template_name, template_dirs=None):
source, origin = self.load_template_source(template_name, template_dirs)
template = Template(source)
return template, origin | Add example code copied from django doc | Add example code copied from django doc
| Python | mit | imom0/django-plim | #!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
Add example code copied from django doc | #!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
from django.template.loaders import app_directories
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
class Template(MakoTemplate):
def render(self, context):
context_dict = {}
for d in context.dicts:
context_dict.update(d)
return super(Template, self).render(context_dict)
class Loader(app_directories.Loader):
is_usable = True
def load_template(self, template_name, template_dirs=None):
source, origin = self.load_template_source(template_name, template_dirs)
template = Template(source)
return template, origin | <commit_before>#!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
<commit_msg>Add example code copied from django doc<commit_after> | #!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
from django.template.loaders import app_directories
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
class Template(MakoTemplate):
def render(self, context):
context_dict = {}
for d in context.dicts:
context_dict.update(d)
return super(Template, self).render(context_dict)
class Loader(app_directories.Loader):
is_usable = True
def load_template(self, template_name, template_dirs=None):
source, origin = self.load_template_source(template_name, template_dirs)
template = Template(source)
return template, origin | #!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
Add example code copied from django doc#!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
from django.template.loaders import app_directories
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
class Template(MakoTemplate):
def render(self, context):
context_dict = {}
for d in context.dicts:
context_dict.update(d)
return super(Template, self).render(context_dict)
class Loader(app_directories.Loader):
is_usable = True
def load_template(self, template_name, template_dirs=None):
source, origin = self.load_template_source(template_name, template_dirs)
template = Template(source)
return template, origin | <commit_before>#!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
<commit_msg>Add example code copied from django doc<commit_after>#!/usr/bin/env python
#-*- coding: UTF-8 -*-
from functools import partial
from django.conf import settings
from plim import preprocessor as plim_preprocessor
from mako.template import Template as MakoTemplate
from mako.lookup import TemplateLookup
from django.template.loaders import app_directories
lookup = TemplateLookup(directories=settings.TEMPLATE_DIRS)
Template = partial(MakoTemplate, lookup=lookup,
preprocessor=plim_preprocessor)
class Template(MakoTemplate):
def render(self, context):
context_dict = {}
for d in context.dicts:
context_dict.update(d)
return super(Template, self).render(context_dict)
class Loader(app_directories.Loader):
is_usable = True
def load_template(self, template_name, template_dirs=None):
source, origin = self.load_template_source(template_name, template_dirs)
template = Template(source)
return template, origin |
c53824a3427235c814cfe35c5c85fd5e1e312b40 | i3/.config/i3/scripts/lock_screen/lock_screen.py | i3/.config/i3/scripts/lock_screen/lock_screen.py | #!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
dpms.GetTimeouts()
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
dpms.GetTimeouts()
| #!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
| Remove call to GetTimeouts() after SetTimeouts() | i3: Remove call to GetTimeouts() after SetTimeouts()
Fixed in commit 72e984a54049c77208546b8565cece100e87be48 from
m45t3r/python-dpms.
| Python | mit | m45t3r/dotfiles,m45t3r/dotfiles,m45t3r/dotfiles | #!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
dpms.GetTimeouts()
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
dpms.GetTimeouts()
i3: Remove call to GetTimeouts() after SetTimeouts()
Fixed in commit 72e984a54049c77208546b8565cece100e87be48 from
m45t3r/python-dpms. | #!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
| <commit_before>#!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
dpms.GetTimeouts()
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
dpms.GetTimeouts()
<commit_msg>i3: Remove call to GetTimeouts() after SetTimeouts()
Fixed in commit 72e984a54049c77208546b8565cece100e87be48 from
m45t3r/python-dpms.<commit_after> | #!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
| #!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
dpms.GetTimeouts()
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
dpms.GetTimeouts()
i3: Remove call to GetTimeouts() after SetTimeouts()
Fixed in commit 72e984a54049c77208546b8565cece100e87be48 from
m45t3r/python-dpms.#!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
| <commit_before>#!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
dpms.GetTimeouts()
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
dpms.GetTimeouts()
<commit_msg>i3: Remove call to GetTimeouts() after SetTimeouts()
Fixed in commit 72e984a54049c77208546b8565cece100e87be48 from
m45t3r/python-dpms.<commit_after>#!/usr/bin/env python
from subprocess import check_call, CalledProcessError
from tempfile import NamedTemporaryFile
from dpms import DPMS
from mss import mss
from PIL import Image, ImageFilter
GAUSSIAN_BLUR_RADIUS = 5
SCREEN_TIMEOUT = (5, 5, 5) # Standby, Suspend, Off
# Get current DPMS settings
dpms = DPMS()
current_timeouts = dpms.GetTimeouts()
with mss() as sct:
# Get the "All-in-one" monitor
monitor = sct.monitors[0]
# Get raw pixels of the screen
sct_img = sct.grab(monitor)
# Create Image object using Pillow
img = Image.frombytes("RGB", sct_img.size, sct_img.rgb)
with NamedTemporaryFile(suffix=".png") as tempfile:
# Apply filters to Image
img = img.filter(ImageFilter.GaussianBlur(radius=GAUSSIAN_BLUR_RADIUS))
# Save temporary file
img.save(tempfile.name, optimize=False, compress_level=1)
# Set monitor timeout to SCREEN_TIMEOUT
dpms.SetTimeouts(*SCREEN_TIMEOUT)
try:
# Load image in i3lock
check_call(["i3lock", "-nei", tempfile.name])
except CalledProcessError:
# Something went wrong, lock it anyway
check_call(["i3lock", "-ne"])
finally:
# Restore DPMS settings
dpms.SetTimeouts(*current_timeouts)
|
55c72a5297244ba51fba5ebc5b71efc3001e0dd4 | otz/__init__.py | otz/__init__.py | from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
| from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
from otz.Beam import Beam, Bead
| Add Beam, Bead to main module | Add Beam, Bead to main module
| Python | unlicense | ghallsimpsons/optical_tweezers | from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
Add Beam, Bead to main module | from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
from otz.Beam import Beam, Bead
| <commit_before>from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
<commit_msg>Add Beam, Bead to main module<commit_after> | from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
from otz.Beam import Beam, Bead
| from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
Add Beam, Bead to main modulefrom otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
from otz.Beam import Beam, Bead
| <commit_before>from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
<commit_msg>Add Beam, Bead to main module<commit_after>from otz.Timestream import CalibrationTimestream, CapturedTimestream
from otz.Calibration import Calibration
from otz.Beam import Beam, Bead
|
cd75c139910e8968e5262d0f0f5289119b258f21 | phileo/views.py | phileo/views.py | from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
| from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
| Remove user from count query to show likes count for all users for obj | Remove user from count query to show likes count for all users for obj
| Python | mit | pinax/phileo,jacobwegner/phileo,rizumu/pinax-likes,rizumu/pinax-likes,jacobwegner/phileo,pinax/pinax-likes,pinax/phileo | from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
Remove user from count query to show likes count for all users for obj | from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
| <commit_before>from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
<commit_msg>Remove user from count query to show likes count for all users for obj<commit_after> | from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
| from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
Remove user from count query to show likes count for all users for objfrom django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
| <commit_before>from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
content_type = get_object_or_404(ContentType, pk=content_type_id)
like, created = Like.objects.get_or_create(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
)
if created:
object_liked.send(sender=Like, like=like)
else:
like.delete()
object_unliked.send(
sender=Like,
object=content_type.get_object_for_this_type(
pk=object_id
)
)
if request.is_ajax():
return HttpResponse(json.dumps({
"likes_count": Like.objects.filter(
sender = request.user,
receiver_content_type = content_type,
receiver_object_id = object_id
).count()
}), mimetype="application/json")
return redirect(request.META["HTTP_REFERER"])
<commit_msg>Remove user from count query to show likes count for all users for obj<commit_after>from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.utils import simplejson as json
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.http import require_POST
from phileo.models import Like
from phileo.signals import object_liked, object_unliked
@require_POST
@login_required
def like_toggle(request, content_type_id, object_id):
    """Toggle the current user's "like" on an arbitrary object.

    Creates a ``Like`` row for (user, content type, object id) when none
    exists and fires ``object_liked``; otherwise deletes the existing row
    and fires ``object_unliked`` with the liked object itself.  AJAX
    callers receive a JSON payload carrying the object's total like count
    (across all users); non-AJAX callers are redirected back to the
    referring page.
    """
    ctype = get_object_or_404(ContentType, pk=content_type_id)
    existing, was_created = Like.objects.get_or_create(
        sender = request.user,
        receiver_content_type = ctype,
        receiver_object_id = object_id
    )
    if not was_created:
        # Second toggle: drop the like, then announce the un-like.  The
        # signal is handed the target object because the Like row is gone.
        existing.delete()
        object_unliked.send(
            sender=Like,
            object=ctype.get_object_for_this_type(pk=object_id)
        )
    else:
        object_liked.send(sender=Ike if False else Like, like=existing)
    if request.is_ajax():
        payload = {
            "likes_count": Like.objects.filter(
                receiver_content_type = ctype,
                receiver_object_id = object_id
            ).count()
        }
        return HttpResponse(json.dumps(payload), mimetype="application/json")
    return redirect(request.META["HTTP_REFERER"])
|
8b0e39eec8a82fd3f5a424ec75678426b2bf523e | cinder/version.py | cinder/version.py | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pbr import version as pbr_version
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr_version.VersionInfo('cinder')
version_string = version_info.version_string
| # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo:
version = "REDHATCINDERVERSION"
release = "REDHATCINDERRELEASE"
def release_string(self):
return '%s-%s' % (self.version, self.release)
def version_string(self):
return self.version
version_info = VersionInfo()
version_string = version_info.version_string
| Remove runtime dep on python-pbr, python-d2to1 | Remove runtime dep on python-pbr, python-d2to1
Requires RPM spec to fill in REDHATCINDERVERSION.
| Python | apache-2.0 | alex8866/cinder,alex8866/cinder | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pbr import version as pbr_version
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr_version.VersionInfo('cinder')
version_string = version_info.version_string
Remove runtime dep on python-pbr, python-d2to1
Requires RPM spec to fill in REDHATCINDERVERSION. | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo:
version = "REDHATCINDERVERSION"
release = "REDHATCINDERRELEASE"
def release_string(self):
return '%s-%s' % (self.version, self.release)
def version_string(self):
return self.version
version_info = VersionInfo()
version_string = version_info.version_string
| <commit_before># Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pbr import version as pbr_version
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr_version.VersionInfo('cinder')
version_string = version_info.version_string
<commit_msg>Remove runtime dep on python-pbr, python-d2to1
Requires RPM spec to fill in REDHATCINDERVERSION.<commit_after> | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo:
version = "REDHATCINDERVERSION"
release = "REDHATCINDERRELEASE"
def release_string(self):
return '%s-%s' % (self.version, self.release)
def version_string(self):
return self.version
version_info = VersionInfo()
version_string = version_info.version_string
| # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pbr import version as pbr_version
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr_version.VersionInfo('cinder')
version_string = version_info.version_string
Remove runtime dep on python-pbr, python-d2to1
Requires RPM spec to fill in REDHATCINDERVERSION.# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo:
version = "REDHATCINDERVERSION"
release = "REDHATCINDERRELEASE"
def release_string(self):
return '%s-%s' % (self.version, self.release)
def version_string(self):
return self.version
version_info = VersionInfo()
version_string = version_info.version_string
| <commit_before># Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pbr import version as pbr_version
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr_version.VersionInfo('cinder')
version_string = version_info.version_string
<commit_msg>Remove runtime dep on python-pbr, python-d2to1
Requires RPM spec to fill in REDHATCINDERVERSION.<commit_after># Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CINDER_VENDOR = "OpenStack Foundation"
CINDER_PRODUCT = "OpenStack Cinder"
CINDER_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo:
    """Static stand-in for pbr's VersionInfo.

    The placeholder strings below are substituted with the real version
    and release by the RPM spec file at package build time.
    """

    version = "REDHATCINDERVERSION"
    release = "REDHATCINDERRELEASE"

    def release_string(self):
        # Full "<version>-<release>" package identifier.
        return "{0}-{1}".format(self.version, self.release)

    def version_string(self):
        # Bare upstream version, without the distro release suffix.
        return self.version
version_info = VersionInfo()
version_string = version_info.version_string
|
9c6739830ea8ccfbe697bc691de001a42f01f9c6 | serial_protocol/test.py | serial_protocol/test.py | import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
ser.write('1')
#ser.write('0' if left >= 0 else '1')
#ser.write(struct.pack("B", abs(left) * 255))
#ser.write('0' if right >= 0 else '1')
#ser.write(struct.pack("B", abs(right) * 255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write('0')
ser.write('0')
ser.write('0')
print('test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
# Write OpCode
ser.write('1')
# Write Left Motor Direction
if (left >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Left Motor Speed
ser.write(bytes(abs(left * 255)))
# Write Right Motor Direction
if (right >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Right Motor Speed
ser.write(bytes(abs(right * 255)))
# Pad message to 9 bytes
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| Write each byte at a time in protocol | Write each byte at a time in protocol
| Python | mit | zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9 | import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
ser.write('1')
#ser.write('0' if left >= 0 else '1')
#ser.write(struct.pack("B", abs(left) * 255))
#ser.write('0' if right >= 0 else '1')
#ser.write(struct.pack("B", abs(right) * 255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write('0')
ser.write('0')
ser.write('0')
print('test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
Write each byte at a time in protocol | import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
# Write OpCode
ser.write('1')
# Write Left Motor Direction
if (left >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Left Motor Speed
ser.write(bytes(abs(left * 255)))
# Write Right Motor Direction
if (right >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Right Motor Speed
ser.write(bytes(abs(right * 255)))
# Pad message to 9 bytes
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| <commit_before>import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
ser.write('1')
#ser.write('0' if left >= 0 else '1')
#ser.write(struct.pack("B", abs(left) * 255))
#ser.write('0' if right >= 0 else '1')
#ser.write(struct.pack("B", abs(right) * 255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write('0')
ser.write('0')
ser.write('0')
print('test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
<commit_msg>Write each byte at a time in protocol<commit_after> | import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
# Write OpCode
ser.write('1')
# Write Left Motor Direction
if (left >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Left Motor Speed
ser.write(bytes(abs(left * 255)))
# Write Right Motor Direction
if (right >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Right Motor Speed
ser.write(bytes(abs(right * 255)))
# Pad message to 9 bytes
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
ser.write('1')
#ser.write('0' if left >= 0 else '1')
#ser.write(struct.pack("B", abs(left) * 255))
#ser.write('0' if right >= 0 else '1')
#ser.write(struct.pack("B", abs(right) * 255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write('0')
ser.write('0')
ser.write('0')
print('test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
Write each byte at a time in protocolimport serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
# Write OpCode
ser.write('1')
# Write Left Motor Direction
if (left >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Left Motor Speed
ser.write(bytes(abs(left * 255)))
# Write Right Motor Direction
if (right >= 0):
ser.write(bytes(0))
else:
ser.write(bytes(1))
# Write Right Motor Speed
ser.write(bytes(abs(right * 255)))
# Pad message to 9 bytes
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
ser.write(bytes(0))
print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
| <commit_before>import serial
import time
import binascii
import struct
def establishConnection():
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established")
return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
if(left < -1 or left > 1 or right < -1 or right > 1):
print("Incorrectly formated drive command!")
return;
ser.write('1')
#ser.write('0' if left >= 0 else '1')
#ser.write(struct.pack("B", abs(left) * 255))
#ser.write('0' if right >= 0 else '1')
#ser.write(struct.pack("B", abs(right) * 255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write(bytes(255))
ser.write('0')
ser.write('0')
ser.write('0')
ser.write('0')
print('test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
<commit_msg>Write each byte at a time in protocol<commit_after>import serial
import time
import binascii
import struct
def establishConnection():
    """Open the serial link to the microcontroller and let it settle.

    Returns the open ``serial.Serial`` object at 9600 baud.
    """
    # Define Constants
    SERIAL_DEVICE = "/dev/ttyACM0"  # NOTE(review): assumes the board enumerates here — confirm
    # Establish Connection
    ser = serial.Serial(SERIAL_DEVICE, 9600)
    time.sleep(2)  # presumably the board resets on port open; give it time to boot
    print("Connection Established")
    return ser
# Each motor speed is a float from -1.0 to 1.0
def sendDrive(ser, left, right):
    """Send a 9-byte drive command over the serial link.

    Frame layout: opcode ``'1'`` (0x31), then for each motor a direction
    byte (0 = forward, 1 = reverse) and a speed byte (0-255), then four
    zero bytes of padding.

    ser   -- open serial-port-like object exposing ``write(bytes)``
    left  -- left motor speed, -1.0 .. 1.0
    right -- right motor speed, -1.0 .. 1.0
    """
    if(left < -1 or left > 1 or right < -1 or right > 1):
        print("Incorrectly formated drive command!")
        return
    # Build the whole frame first so it goes out in a single write().
    # The previous code called bytes(n), which produces n NUL bytes on
    # Python 3 (bytes(0) is even empty) and raises TypeError for floats,
    # so direction/speed bytes were never transmitted correctly.
    frame = bytearray(b'1')                         # opcode
    for speed in (left, right):
        frame.append(0 if speed >= 0 else 1)        # direction byte
        frame.append(int(abs(speed) * 255))         # speed byte, 0-255
    frame.extend(b'\x00' * 4)                       # pad frame to 9 bytes
    ser.write(bytes(frame))
    print('Test')
if __name__ == '__main__':
ser = establishConnection()
sendDrive(ser, -1.0, -1.0)
time.sleep(5)
sendDrive(ser, 1.0, 1.0)
time.sleep(5)
sendDrive(ser, 0.0, 0.0)
|
1503bf01903f088d812a9cb38a4ce7582e063a58 | gaphor/misc/tests/test_gidlethread.py | gaphor/misc/tests/test_gidlethread.py | import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.02 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.02)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
| import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
    """Yield the integers 0 .. count-1, one per generator resumption."""
    yield from range(count)
@pytest.fixture
def gidle_counter(request):
    """Run counter(request.param) on a GIdleThread and yield wait()'s result.

    ``request.param`` (supplied via indirect parametrization) is the number
    of coroutine steps.  The yielded value is what ``wait(0.05)`` returned:
    truthy when the wait timed out, falsy when the coroutine finished in
    time (see the tests below that assert on it).
    """
    # Setup GIdle Thread with 0.05 sec timeout
    t = GIdleThread(counter(request.param))
    t.start()
    assert t.is_alive()
    wait_result = t.wait(0.05)
    yield wait_result
    # Teardown GIdle Thread
    t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
| Fix test flakiness by changing timeout to a longer value | Fix test flakiness by changing timeout to a longer value
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
| Python | lgpl-2.1 | amolenaar/gaphor,amolenaar/gaphor | import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.02 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.02)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
Fix test flakiness by changing timeout to a longer value
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me> | import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.05 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.05)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
| <commit_before>import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.02 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.02)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
<commit_msg>Fix test flakiness by changing timeout to a longer value
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after> | import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.05 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.05)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
| import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.02 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.02)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
Fix test flakiness by changing timeout to a longer value
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.05 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.05)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
| <commit_before>import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.02 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.02)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
<commit_msg>Fix test flakiness by changing timeout to a longer value
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>import pytest
from gaphor.misc.gidlethread import GIdleThread
def counter(count):
for x in range(count):
yield x
@pytest.fixture
def gidle_counter(request):
# Setup GIdle Thread with 0.05 sec timeout
t = GIdleThread(counter(request.param))
t.start()
assert t.is_alive()
wait_result = t.wait(0.05)
yield wait_result
# Teardown GIdle Thread
t.interrupt()
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True)
def test_wait_with_timeout(gidle_counter):
# GIVEN a long coroutine thread
# WHEN waiting short timeout
# THEN timeout is True
assert gidle_counter
@pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True)
def test_wait_until_finished(gidle_counter):
# GIVEN a short coroutine thread
# WHEN wait for coroutine to finish
# THEN coroutine finished
assert not gidle_counter
|
072bc480cbc489cd89d03405026f152934893b7e | go/routers/keyword/view_definition.py | go/routers/keyword/view_definition.py | from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if not form.is_valid():
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
| from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if (not form.is_valid()) or form.cleaned_data['DELETE']:
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
| Revert "Remove unnecessary and broken DELETE check." | Revert "Remove unnecessary and broken DELETE check."
This reverts commit 7906153b4718f34ed31c193a8e80b171e567209c.
Reverting commit accidentally commited straight to develop.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if not form.is_valid():
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
Revert "Remove unnecessary and broken DELETE check."
This reverts commit 7906153b4718f34ed31c193a8e80b171e567209c.
Reverting commit accidentally commited straight to develop. | from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if (not form.is_valid()) or form.cleaned_data['DELETE']:
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
| <commit_before>from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if not form.is_valid():
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
<commit_msg>Revert "Remove unnecessary and broken DELETE check."
This reverts commit 7906153b4718f34ed31c193a8e80b171e567209c.
Reverting commit accidentally commited straight to develop.<commit_after> | from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if (not form.is_valid()) or form.cleaned_data['DELETE']:
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
| from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if not form.is_valid():
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
Revert "Remove unnecessary and broken DELETE check."
This reverts commit 7906153b4718f34ed31c193a8e80b171e567209c.
Reverting commit accidentally commited straight to develop.from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if (not form.is_valid()) or form.cleaned_data['DELETE']:
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
| <commit_before>from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if not form.is_valid():
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
<commit_msg>Revert "Remove unnecessary and broken DELETE check."
This reverts commit 7906153b4718f34ed31c193a8e80b171e567209c.
Reverting commit accidentally commited straight to develop.<commit_after>from django import forms
from go.router.view_definition import RouterViewDefinitionBase, EditRouterView
class KeywordForm(forms.Form):
keyword = forms.CharField()
target_endpoint = forms.CharField()
class BaseKeywordFormSet(forms.formsets.BaseFormSet):
@staticmethod
def initial_from_config(data):
return [{'keyword': k, 'target_endpoint': v}
for k, v in sorted(data.items())]
def to_config(self):
keyword_endpoint_mapping = {}
for form in self:
if (not form.is_valid()) or form.cleaned_data['DELETE']:
continue
keyword = form.cleaned_data['keyword']
target_endpoint = form.cleaned_data['target_endpoint']
keyword_endpoint_mapping[keyword] = target_endpoint
return keyword_endpoint_mapping
KeywordFormSet = forms.formsets.formset_factory(
KeywordForm, can_delete=True, extra=1, formset=BaseKeywordFormSet)
class EditKeywordView(EditRouterView):
edit_forms = (
('keyword_endpoint_mapping', KeywordFormSet),
)
class RouterViewDefinition(RouterViewDefinitionBase):
edit_view = EditKeywordView
|
2867ea119a846e1eb61c64a206058dc0d28d090b | src/foremast/utils/check_task.py | src/foremast/utils/check_task.py | """Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..exceptions import SpinnakerTaskError
HEADERS = {'Content-Type': 'application/json', 'Accept': '*/*'}
GATE_URL = "http://gate-api.build.example.com:8084"
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(GATE_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
| """Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..consts import API_URL, HEADERS
from ..exceptions import SpinnakerTaskError
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(API_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
| Use global API_URL and HEADERS | refactor: Use global API_URL and HEADERS
See also: PSOBAT-1197
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..exceptions import SpinnakerTaskError
HEADERS = {'Content-Type': 'application/json', 'Accept': '*/*'}
GATE_URL = "http://gate-api.build.example.com:8084"
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(GATE_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
refactor: Use global API_URL and HEADERS
See also: PSOBAT-1197 | """Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..consts import API_URL, HEADERS
from ..exceptions import SpinnakerTaskError
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(API_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
| <commit_before>"""Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..exceptions import SpinnakerTaskError
HEADERS = {'Content-Type': 'application/json', 'Accept': '*/*'}
GATE_URL = "http://gate-api.build.example.com:8084"
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(GATE_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
<commit_msg>refactor: Use global API_URL and HEADERS
See also: PSOBAT-1197<commit_after> | """Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..consts import API_URL, HEADERS
from ..exceptions import SpinnakerTaskError
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(API_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
| """Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..exceptions import SpinnakerTaskError
HEADERS = {'Content-Type': 'application/json', 'Accept': '*/*'}
GATE_URL = "http://gate-api.build.example.com:8084"
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(GATE_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
refactor: Use global API_URL and HEADERS
See also: PSOBAT-1197"""Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..consts import API_URL, HEADERS
from ..exceptions import SpinnakerTaskError
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(API_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
| <commit_before>"""Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..exceptions import SpinnakerTaskError
HEADERS = {'Content-Type': 'application/json', 'Accept': '*/*'}
GATE_URL = "http://gate-api.build.example.com:8084"
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(GATE_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
<commit_msg>refactor: Use global API_URL and HEADERS
See also: PSOBAT-1197<commit_after>"""Check Taskid status."""
import logging
import requests
from tryagain import retries
from ..consts import API_URL, HEADERS
from ..exceptions import SpinnakerTaskError
LOG = logging.getLogger(__name__)
@retries(max_attempts=10, wait=10, exceptions=(AssertionError, ValueError))
def check_task(taskid, app_name):
"""Check task status.
Args:
taskid: the task id returned from create_elb.
app_name: application name related to this task.
Returns:
polls for task status.
"""
try:
taskurl = taskid.get('ref', '0000')
except AttributeError:
taskurl = taskid
taskid = taskurl.split('/tasks/')[-1]
LOG.info('Checking taskid %s', taskid)
url = '{0}/applications/{1}/tasks/{2}'.format(API_URL, app_name, taskid)
task_response = requests.get(url, headers=HEADERS)
LOG.debug(task_response.json())
assert task_response.ok
task_state = task_response.json()
status = task_state['status']
LOG.info('Current task status: %s', status)
if status == 'SUCCEEDED':
return status
elif status == 'TERMINAL':
raise SpinnakerTaskError(task_state)
else:
raise ValueError
|
939998db349c364aa0f5ba4705d4feb2da7104d5 | nn/flags.py | nn/flags.py | import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float32", "", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
| import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
| Fix float type flag definition | Fix float type flag definition
| Python | unlicense | raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten | import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float32", "", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
Fix float type flag definition | import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
| <commit_before>import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float32", "", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
<commit_msg>Fix float type flag definition<commit_after> | import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
| import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float32", "", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
Fix float type flag definitionimport functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
| <commit_before>import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float32", "", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
<commit_msg>Fix float type flag definition<commit_after>import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
|
10db5e8b893a84e765162535f64e1ede81d48b47 | empty_check.py | empty_check.py | from django.core.exceptions import ValidationError
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
| from django.core.exceptions import ValidationError
# Usage example in a custom form
# firstname = forms.CharField(validators = [EmptyCheck()])
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
| Add comment to show usage example | Add comment to show usage example | Python | mit | vishalsodani/django-empty-check-validator | from django.core.exceptions import ValidationError
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
Add comment to show usage example | from django.core.exceptions import ValidationError
# Usage example in a custom form
# firstname = forms.CharField(validators = [EmptyCheck()])
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
| <commit_before>from django.core.exceptions import ValidationError
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
<commit_msg>Add comment to show usage example<commit_after> | from django.core.exceptions import ValidationError
# Usage example in a custom form
# firstname = forms.CharField(validators = [EmptyCheck()])
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
| from django.core.exceptions import ValidationError
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
Add comment to show usage examplefrom django.core.exceptions import ValidationError
# Usage example in a custom form
# firstname = forms.CharField(validators = [EmptyCheck()])
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
| <commit_before>from django.core.exceptions import ValidationError
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
<commit_msg>Add comment to show usage example<commit_after>from django.core.exceptions import ValidationError
# Usage example in a custom form
# firstname = forms.CharField(validators = [EmptyCheck()])
class EmptyCheck(object):
def __call__(self, value):
if len(value.strip()) == 0:
raise ValidationError("Value cannot be empty")
|
34812fe2deec64229efd4119640f3c2ddf0ed415 | visualize.py | visualize.py | '''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
| '''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
| Make the sprint layout a bit easier to look at | Make the sprint layout a bit easier to look at
| Python | mit | jacksontj/dnms,jacksontj/dnms | '''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
Make the sprint layout a bit easier to look at | '''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
| <commit_before>'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
<commit_msg>Make the sprint layout a bit easier to look at<commit_after> | '''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
| '''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
Make the sprint layout a bit easier to look at'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
| <commit_before>'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
<commit_msg>Make the sprint layout a bit easier to look at<commit_after>'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
67a230dd5673601f2e1f1a8c3deb8597f29287db | src/tmlib/workflow/align/args.py | src/tmlib/workflow/align/args.py | from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=10, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
| from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=100, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
| Increase default batch size for align step | Increase default batch size for align step
| Python | agpl-3.0 | TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary | from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=10, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
Increase default batch size for align step | from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=100, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
| <commit_before>from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=10, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
<commit_msg>Increase default batch size for align step<commit_after> | from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=100, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
| from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=10, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
Increase default batch size for align stepfrom tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=100, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
| <commit_before>from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=10, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
<commit_msg>Increase default batch size for align step<commit_after>from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow.args import Argument
from tmlib.workflow import register_batch_args
from tmlib.workflow import register_submission_args
@register_batch_args('align')
class AlignBatchArguments(BatchArguments):
ref_cycle = Argument(
type=int, required=True, flag='c',
help='''zero-based index of the cycle whose sites should be used
as reference
'''
)
ref_wavelength = Argument(
type=str, required=True, flag='w',
help='name of the wavelength whose images should be used as reference'
)
batch_size = Argument(
type=int, default=100, flag='b',
help='number of image files that should be processed per job'
)
@register_submission_args('align')
class AlignSubmissionArguments(SubmissionArguments):
pass
|
057510c78f80c3592c562006413049ab1292d0a3 | ipaqe_provision_hosts/backend/base.py | ipaqe_provision_hosts/backend/base.py | # author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
class IDMBackendException(Exception):
pass
class VMsNotCreatedError(IDMBackendException):
pass
class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
| # author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
class VMsNotCreatedError(IPAQEProvisionerError):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
| Replace the exceptions in backend classes | Replace the exceptions in backend classes
| Python | mit | apophys/ipaqe-provision-hosts | # author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
class IDMBackendException(Exception):
pass
class VMsNotCreatedError(IDMBackendException):
pass
class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
Replace the exceptions in backend classes | # author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
class VMsNotCreatedError(IPAQEProvisionerError):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
| <commit_before># author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
class IDMBackendException(Exception):
pass
class VMsNotCreatedError(IDMBackendException):
pass
class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
<commit_msg>Replace the exceptions in backend classes<commit_after> | # author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
class VMsNotCreatedError(IPAQEProvisionerError):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
| # author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
class IDMBackendException(Exception):
pass
class VMsNotCreatedError(IDMBackendException):
pass
class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
Replace the exceptions in backend classes# author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
class VMsNotCreatedError(IPAQEProvisionerError):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
| <commit_before># author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
class IDMBackendException(Exception):
pass
class VMsNotCreatedError(IDMBackendException):
pass
class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
<commit_msg>Replace the exceptions in backend classes<commit_after># author: Milan Kubik
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
class VMsNotCreatedError(IPAQEProvisionerError):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
|
ecde3e00de3782024f0dfbab7f2d7912ab62342b | src/birding/__init__.py | src/birding/__init__.py | from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .search import SearchManager
from .twitter_api import Twitter
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'SearchManager',
'Twitter',
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
| from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
| Remove unnecessary objects from root namespace. | Remove unnecessary objects from root namespace.
| Python | apache-2.0 | Parsely/birding,Parsely/birding | from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .search import SearchManager
from .twitter_api import Twitter
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'SearchManager',
'Twitter',
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
Remove unnecessary objects from root namespace. | from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
| <commit_before>from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .search import SearchManager
from .twitter_api import Twitter
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'SearchManager',
'Twitter',
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
<commit_msg>Remove unnecessary objects from root namespace.<commit_after> | from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
| from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .search import SearchManager
from .twitter_api import Twitter
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'SearchManager',
'Twitter',
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
Remove unnecessary objects from root namespace.from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
| <commit_before>from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .search import SearchManager
from .twitter_api import Twitter
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'SearchManager',
'Twitter',
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
<commit_msg>Remove unnecessary objects from root namespace.<commit_after>from __future__ import absolute_import, print_function
import logging
from . import bolt, config, follow, search, spout, twitter_api
from .version import VERSION, __version__
from .version import __doc__ as __license__
__all__ = [
'VERSION',
'__license__',
'__version__',
'bolt',
'config',
'follow',
'search',
'spout',
'twitter_api',
]
# Configure the logger. No configuration is exposed by birding itself. A
# project using birding can change the log level after importing `birding`
# with:
#
# logging.getLogger('birding').setLevel(logging.DEBUG)
#
logger = logging.getLogger('birding')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.WARNING)
|
8a43cf58791a665a4fc23bc5d0911af61f7e1fb6 | qipr_approver/approver/views/similar_projects.py | qipr_approver/approver/views/similar_projects.py | from django.shortcuts import redirect
from approver.workflows import project_crud
from approver.decorators import login_required
import approver.utils as utils
from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/') | from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
import approver.utils as utils
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
| Add shib auth to similar projects page | Add shib auth to similar projects page
| Python | apache-2.0 | DevMattM/qipr_approver,DevMattM/qipr_approver,ctsit/qipr_approver,ctsit/qipr_approver,ctsit/qipr_approver,DevMattM/qipr_approver,DevMattM/qipr_approver,PFWhite/qipr_approver,DevMattM/qipr_approver,PFWhite/qipr_approver,ctsit/qipr_approver,PFWhite/qipr_approver,ctsit/qipr_approver,PFWhite/qipr_approver,PFWhite/qipr_approver | from django.shortcuts import redirect
from approver.workflows import project_crud
from approver.decorators import login_required
import approver.utils as utils
from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')Add shib auth to similar projects page | from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
import approver.utils as utils
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
| <commit_before>from django.shortcuts import redirect
from approver.workflows import project_crud
from approver.decorators import login_required
import approver.utils as utils
from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')<commit_msg>Add shib auth to similar projects page<commit_after> | from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
import approver.utils as utils
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
| from django.shortcuts import redirect
from approver.workflows import project_crud
from approver.decorators import login_required
import approver.utils as utils
from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')Add shib auth to similar projects pagefrom django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
import approver.utils as utils
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
| <commit_before>from django.shortcuts import redirect
from approver.workflows import project_crud
from approver.decorators import login_required
import approver.utils as utils
from django.core.urlresolvers import reverse
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')<commit_msg>Add shib auth to similar projects page<commit_after>from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from approver.workflows import project_crud
import approver.utils as utils
@login_required
def similar_projects(request, project_id=None,from_page=None):
project = project_crud.get_project_or_none(project_id)
if project is None:
utils.dashboard_redirect_and_toast(request, 'Invalid request'.format(project_id))
elif request.method == 'GET':
project_scores = project_crud.get_similar_projects(project)
if (len(project_scores) == 0) :
utils.set_toast(request.session, 'No relevant projects were found!')
if(from_page == "dashboard") :
return redirect(reverse("approver:dashboard"))
else :
return redirect(reverse("approver:approve") + str(project.id) + '/')
context = {
'content': 'approver/similar_projects.html',
'project_scores': project_scores,
'project_id' : project_id,
}
return utils.layout_render(request, context)
elif request.method == 'POST':
return redirect(reverse("approver:approve") + str(project.id) + '/')
|
1e574befc82b7df53fe62b9c5089772172f99178 | apps/reactions/serializers.py | apps/reactions/serializers.py | from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
| from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
| Add id to API reponse for Reaction. | Add id to API reponse for Reaction.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site | from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
Add id to API reponse for Reaction. | from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
| <commit_before>from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
<commit_msg>Add id to API reponse for Reaction.<commit_after> | from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
| from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
Add id to API reponse for Reaction.from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
| <commit_before>from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction')
<commit_msg>Add id to API reponse for Reaction.<commit_after>from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
# Read-only fields.
created = serializers.Field()
# Custom fields.
author = ReactionAuthorSerializer()
# TODO: This isn't work with the pattern: api/blogs/<slug>/reactions/<pk>
# Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
# url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
class ReactionListSerializer(ReactionDetailSerializer):
class Meta:
model = Reaction
fields = ('created', 'author', 'reaction', 'id')
|
896b385f983ecf939bdc2ea938b9949fdc3fdbb8 | colorise/color_tools.py | colorise/color_tools.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def color_distance(rgb1, rgb2):
"""Compute the Euclidian distance between two colors."""
r1, g1, b1 = rgb1
r2, g2, b2 = rgb2
return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
| Remove unused color distance function | Remove unused color distance function
| Python | bsd-3-clause | MisanthropicBit/colorise | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def color_distance(rgb1, rgb2):
"""Compute the Euclidian distance between two colors."""
r1, g1, b1 = rgb1
r2, g2, b2 = rgb2
return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
Remove unused color distance function | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def color_distance(rgb1, rgb2):
"""Compute the Euclidian distance between two colors."""
r1, g1, b1 = rgb1
r2, g2, b2 = rgb2
return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
<commit_msg>Remove unused color distance function<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def color_distance(rgb1, rgb2):
"""Compute the Euclidian distance between two colors."""
r1, g1, b1 = rgb1
r2, g2, b2 = rgb2
return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
Remove unused color distance function#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def color_distance(rgb1, rgb2):
"""Compute the Euclidian distance between two colors."""
r1, g1, b1 = rgb1
r2, g2, b2 = rgb2
return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
<commit_msg>Remove unused color distance function<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
|
1c0644d5629008c98f8402b95b797beea1a50bc5 | promgen/sender/__init__.py | promgen/sender/__init__.py | import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
| import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
MAPPING = [
('project', Project),
('service', Service),
]
def send(self, data):
sent = 0
for alert in data['alerts']:
for label, klass in self.MAPPING:
logger.debug('Checking for %s', label)
if label in alert['labels']:
logger.debug('Checking for %s %s', label, klass)
for obj in klass.objects.filter(name=alert['labels'][label]):
for sender in obj.sender.filter(sender=self.__module__):
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service')
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
| Add a MAPPING table to remove duplicated loop logic | Add a MAPPING table to remove duplicated loop logic
| Python | mit | kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen | import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
Add a MAPPING table to remove duplicated loop logic | import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
MAPPING = [
('project', Project),
('service', Service),
]
def send(self, data):
sent = 0
for alert in data['alerts']:
for label, klass in self.MAPPING:
logger.debug('Checking for %s', label)
if label in alert['labels']:
logger.debug('Checking for %s %s', label, klass)
for obj in klass.objects.filter(name=alert['labels'][label]):
for sender in obj.sender.filter(sender=self.__module__):
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service')
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
| <commit_before>import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
<commit_msg>Add a MAPPING table to remove duplicated loop logic<commit_after> | import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
MAPPING = [
('project', Project),
('service', Service),
]
def send(self, data):
sent = 0
for alert in data['alerts']:
for label, klass in self.MAPPING:
logger.debug('Checking for %s', label)
if label in alert['labels']:
logger.debug('Checking for %s %s', label, klass)
for obj in klass.objects.filter(name=alert['labels'][label]):
for sender in obj.sender.filter(sender=self.__module__):
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service')
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
| import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
Add a MAPPING table to remove duplicated loop logicimport logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
MAPPING = [
('project', Project),
('service', Service),
]
def send(self, data):
sent = 0
for alert in data['alerts']:
for label, klass in self.MAPPING:
logger.debug('Checking for %s', label)
if label in alert['labels']:
logger.debug('Checking for %s %s', label, klass)
for obj in klass.objects.filter(name=alert['labels'][label]):
for sender in obj.sender.filter(sender=self.__module__):
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service')
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
| <commit_before>import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
<commit_msg>Add a MAPPING table to remove duplicated loop logic<commit_after>import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
MAPPING = [
('project', Project),
('service', Service),
]
def send(self, data):
sent = 0
for alert in data['alerts']:
for label, klass in self.MAPPING:
logger.debug('Checking for %s', label)
if label in alert['labels']:
logger.debug('Checking for %s %s', label, klass)
for obj in klass.objects.filter(name=alert['labels'][label]):
for sender in obj.sender.filter(sender=self.__module__):
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service')
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
e174a898595664ff291cbf8ccda0f1c404a73575 | control/server.py | control/server.py | import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, host="localhost"):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
self.connect_fn = None
self.msg_fn = None
self.close_fn = None
self.clients = []
def handle_accepted(self, sock, addr):
new_client = Client(sock)
new_client.msg_fn = self.msg_fn
new_client.close_fn = self.close_fn
self.clients.append(new_client)
if self.connect_fn is not None:
self.connect_fn(new_client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
| import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, connect_fn=None, msg_fn=None, close_fn=None):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind(('localhost', port))
self.listen(5)
self.client_connect_fn = connect_fn
self.client_msg_fn = msg_fn
self.client_close_fn = close_fn
self.clients = []
def handle_accepted(self, sock, addr):
client = Client(sock)
client.msg_fn = self.client_msg_fn
client.close_fn = self.client_close
self.clients.append(client)
if self.client_connect_fn:
self.client_connect_fn(client)
def client_close(self, client):
self.clients.remove(client)
if self.client_close_fn:
self.client_close_fn(client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
| Remove disconnected client from clients list, allow client callbacks to be set in constructor. | Remove disconnected client from clients list, allow client callbacks to be set in constructor.
| Python | mit | zwarren/morse-car-controller,zwarren/morse-car-controller | import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, host="localhost"):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
self.connect_fn = None
self.msg_fn = None
self.close_fn = None
self.clients = []
def handle_accepted(self, sock, addr):
new_client = Client(sock)
new_client.msg_fn = self.msg_fn
new_client.close_fn = self.close_fn
self.clients.append(new_client)
if self.connect_fn is not None:
self.connect_fn(new_client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
Remove disconnected client from clients list, allow client callbacks to be set in constructor. | import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, connect_fn=None, msg_fn=None, close_fn=None):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind(('localhost', port))
self.listen(5)
self.client_connect_fn = connect_fn
self.client_msg_fn = msg_fn
self.client_close_fn = close_fn
self.clients = []
def handle_accepted(self, sock, addr):
client = Client(sock)
client.msg_fn = self.client_msg_fn
client.close_fn = self.client_close
self.clients.append(client)
if self.client_connect_fn:
self.client_connect_fn(client)
def client_close(self, client):
self.clients.remove(client)
if self.client_close_fn:
self.client_close_fn(client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
| <commit_before>import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, host="localhost"):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
self.connect_fn = None
self.msg_fn = None
self.close_fn = None
self.clients = []
def handle_accepted(self, sock, addr):
new_client = Client(sock)
new_client.msg_fn = self.msg_fn
new_client.close_fn = self.close_fn
self.clients.append(new_client)
if self.connect_fn is not None:
self.connect_fn(new_client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
<commit_msg>Remove disconnected client from clients list, allow client callbacks to be set in constructor.<commit_after> | import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, connect_fn=None, msg_fn=None, close_fn=None):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind(('localhost', port))
self.listen(5)
self.client_connect_fn = connect_fn
self.client_msg_fn = msg_fn
self.client_close_fn = close_fn
self.clients = []
def handle_accepted(self, sock, addr):
client = Client(sock)
client.msg_fn = self.client_msg_fn
client.close_fn = self.client_close
self.clients.append(client)
if self.client_connect_fn:
self.client_connect_fn(client)
def client_close(self, client):
self.clients.remove(client)
if self.client_close_fn:
self.client_close_fn(client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
| import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, host="localhost"):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
self.connect_fn = None
self.msg_fn = None
self.close_fn = None
self.clients = []
def handle_accepted(self, sock, addr):
new_client = Client(sock)
new_client.msg_fn = self.msg_fn
new_client.close_fn = self.close_fn
self.clients.append(new_client)
if self.connect_fn is not None:
self.connect_fn(new_client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
Remove disconnected client from clients list, allow client callbacks to be set in constructor.import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, connect_fn=None, msg_fn=None, close_fn=None):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind(('localhost', port))
self.listen(5)
self.client_connect_fn = connect_fn
self.client_msg_fn = msg_fn
self.client_close_fn = close_fn
self.clients = []
def handle_accepted(self, sock, addr):
client = Client(sock)
client.msg_fn = self.client_msg_fn
client.close_fn = self.client_close
self.clients.append(client)
if self.client_connect_fn:
self.client_connect_fn(client)
def client_close(self, client):
self.clients.remove(client)
if self.client_close_fn:
self.client_close_fn(client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
| <commit_before>import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, host="localhost"):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind((host, port))
self.listen(5)
self.connect_fn = None
self.msg_fn = None
self.close_fn = None
self.clients = []
def handle_accepted(self, sock, addr):
new_client = Client(sock)
new_client.msg_fn = self.msg_fn
new_client.close_fn = self.close_fn
self.clients.append(new_client)
if self.connect_fn is not None:
self.connect_fn(new_client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
<commit_msg>Remove disconnected client from clients list, allow client callbacks to be set in constructor.<commit_after>import asyncore
import socket
from logging import error, info, warning
from client import Client
class Server(asyncore.dispatcher):
def __init__(self, port, connect_fn=None, msg_fn=None, close_fn=None):
asyncore.dispatcher.__init__(self)
self.create_socket()
self.set_reuse_addr()
self.bind(('localhost', port))
self.listen(5)
self.client_connect_fn = connect_fn
self.client_msg_fn = msg_fn
self.client_close_fn = close_fn
self.clients = []
def handle_accepted(self, sock, addr):
client = Client(sock)
client.msg_fn = self.client_msg_fn
client.close_fn = self.client_close
self.clients.append(client)
if self.client_connect_fn:
self.client_connect_fn(client)
def client_close(self, client):
self.clients.remove(client)
if self.client_close_fn:
self.client_close_fn(client)
def broadcast(self, msg):
for client in self.clients:
client.send_msg(msg)
|
91720739af3c7b35e331949cdd64a98023e23799 | parkings/api/public/parking_area.py | parkings/api/public/parking_area.py | from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
| from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
| Add bbox to parking area view set | Add bbox to parking area view set
| Python | mit | tuomas777/parkkihubi | from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
Add bbox to parking area view set | from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
| <commit_before>from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
<commit_msg>Add bbox to parking area view set<commit_after> | from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
| from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
Add bbox to parking area view setfrom rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
| <commit_before>from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
<commit_msg>Add bbox to parking area view set<commit_after>from rest_framework import viewsets
from rest_framework_gis.pagination import GeoJsonPagination
from rest_framework_gis.serializers import GeoFeatureModelSerializer, GeometrySerializerMethodField
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaSerializer(GeoFeatureModelSerializer):
wgs84_areas = GeometrySerializerMethodField()
def get_wgs84_areas(self, area):
return area.areas.transform(4326, clone=True)
class Meta:
model = ParkingArea
geo_field = 'wgs84_areas'
fields = (
'id',
'space_amount_estimate',
)
class PublicAPIParkingAreaViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaSerializer
pagination_class = GeoJsonPagination
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
|
d52034eddeb510acc367c87c88e4277994157338 | githubsetupircnotifications.py | githubsetupircnotifications.py | """
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
| """
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
| Create the hook for each repo | Create the hook for each repo
| Python | mit | kragniz/github-setup-irc-notifications | """
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
Create the hook for each repo | """
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
| <commit_before>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
<commit_msg>Create the hook for each repo<commit_after> | """
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
| """
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
Create the hook for each repo"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
| <commit_before>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
<commit_msg>Create the hook for each repo<commit_after>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
|
ed4c80aa8e9ee628876c3cc96907ca407ee4ff5d | backend/scripts/ddirdenorm.py | backend/scripts/ddirdenorm.py | #!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
| #!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
| Handle non-existent files in the database. | Handle non-existent files in the database.
| Python | mit | materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org | #!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
Handle non-existent files in the database. | #!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
| <commit_before>#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
<commit_msg>Handle non-existent files in the database.<commit_after> | #!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
| #!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
Handle non-existent files in the database.#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
| <commit_before>#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
<commit_msg>Handle non-existent files in the database.<commit_after>#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
|
e7627ee439e2e4f17466bf124629ae353460a68d | __init__.py | __init__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Change product types are really dangerous!!! | [FIX] Change product types are really dangerous!!! | Python | agpl-3.0 | odoo-l10n-ar/l10n_ar_invoice | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
[FIX] Change product types are really dangerous!!! | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>[FIX] Change product types are really dangerous!!!<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
[FIX] Change product types are really dangerous!!!# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
import product
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>[FIX] Change product types are really dangerous!!!<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test
import afip
import invoice
import config
import partner
import account
import country
import report
import currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
abea9151ce3f97033038102b4b01e84b66b8b670 | __init__.py | __init__.py | ###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| ###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
reload(bugmail)
reload(traceparser)
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| Make the bot reload its secondary modules when you reload the plugin. | Make the bot reload its secondary modules when you reload the plugin.
| Python | bsd-3-clause | aleb/supybot-bugzilla | ###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
Make the bot reload its secondary modules when you reload the plugin. | ###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
reload(bugmail)
reload(traceparser)
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| <commit_before>###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
<commit_msg>Make the bot reload its secondary modules when you reload the plugin.<commit_after> | ###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
reload(bugmail)
reload(traceparser)
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| ###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
Make the bot reload its secondary modules when you reload the plugin.###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
reload(bugmail)
reload(traceparser)
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| <commit_before>###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
<commit_msg>Make the bot reload its secondary modules when you reload the plugin.<commit_after>###
# Copyright (c) 2007, Max Kanat-Alexander
# All rights reserved.
#
#
###
"""
Interact with Bugzilla installations.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "3.0.0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Max Kanat-Alexander', 'mkanat',
'mkanat@bugzilla.org')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/Members/mkanat/Bugzilla'
import config
import plugin
reload(plugin) # In case we're being reloaded.
reload(bugmail)
reload(traceparser)
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.