Dataset columns (16 total; string lengths are min–max):
commit: string, 40 chars
old_file: string, 4–118 chars
new_file: string, 4–118 chars
old_contents: string, 0–2.94k chars
new_contents: string, 1–4.43k chars
subject: string, 15–444 chars
message: string, 16–3.45k chars
lang: categorical, 1 class
license: categorical, 13 classes
repos: string, 5–43.2k chars
prompt: string, 17–4.58k chars
response: string, 1–4.43k chars
prompt_tagged: string, 58–4.62k chars
response_tagged: string, 1–4.43k chars
text: string, 132–7.29k chars
text_tagged: string, 173–7.33k chars
dcac78f31a2c46038d547821d8b56d721a095093
|
app/tools/application.py
|
app/tools/application.py
|
# -*- coding:utf-8 -*-
import os,time,asyncio,json
import logging
logging.basicConfig(level=logging.ERROR)
try:
from aiohttp import web
except ImportError:
logging.error("Can't import module aiohttp")
from tools.log import Log
from tools.httptools import Middleware,Route,Template
from tools.config import Config
class Application(web.Application):
def __init__(self,loop):
self._loop=loop
super(Application,self).__init__(loop=loop,middlewares=Middleware.allmiddlewares())
@asyncio.coroutine
def run(self,addr='127.0.0.1',port='8000'):
Template.init(self)
Route.register_route(self)
pool=yield from create_pool(self._loop)  # NOTE: create_pool is neither imported nor defined in this commit
srv=yield from self._loop.create_server(self.make_handler(),addr,port)
logging.info("server start at http://%s:%s"%(addr,port))
return srv
|
Add Application class,for init all self write module
|
Add Application class,for init all self write module
|
Python
|
mit
|
free-free/pyblog,free-free/pyblog,free-free/pyblog,free-free/pyblog
|
Add Application class,for init all self write module
|
# -*- coding:utf-8 -*-
import os,time,asyncio,json
import logging
logging.basicConfig(level=logging.ERROR)
try:
from aiohttp import web
except ImportError:
logging.error("Can't import module aiohttp")
from tools.log import Log
from tools.httptools import Middleware,Route,Template
from tools.config import Config
class Application(web.Application):
def __init__(self,loop):
self._loop=loop
super(Application,self).__init__(loop=loop,middlewares=Middleware.allmiddlewares())
@asyncio.coroutine
def run(self,addr='127.0.0.1',port='8000'):
Template.init(self)
Route.register_route(self)
pool=yield from create_pool(self._loop)  # NOTE: create_pool is neither imported nor defined in this commit
srv=yield from self._loop.create_server(self.make_handler(),addr,port)
logging.info("server start at http://%s:%s"%(addr,port))
return srv
|
<commit_before><commit_msg>Add Application class,for init all self write module<commit_after>
|
# -*- coding:utf-8 -*-
import os,time,asyncio,json
import logging
logging.basicConfig(level=logging.ERROR)
try:
from aiohttp import web
except ImportError:
logging.error("Can't import module aiohttp")
from tools.log import Log
from tools.httptools import Middleware,Route,Template
from tools.config import Config
class Application(web.Application):
def __init__(self,loop):
self._loop=loop
super(Application,self).__init__(loop=loop,middlewares=Middleware.allmiddlewares())
@asyncio.coroutine
def run(self,addr='127.0.0.1',port='8000'):
Template.init(self)
Route.register_route(self)
pool=yield from create_pool(self._loop)  # NOTE: create_pool is neither imported nor defined in this commit
srv=yield from self._loop.create_server(self.make_handler(),addr,port)
logging.info("server start at http://%s:%s"%(addr,port))
return srv
|
Add Application class,for init all self write module# -*- coding:utf-8 -*-
import os,time,asyncio,json
import logging
logging.basicConfig(level=logging.ERROR)
try:
from aiohttp import web
except ImportError:
logging.error("Can't import module aiohttp")
from tools.log import Log
from tools.httptools import Middleware,Route,Template
from tools.config import Config
class Application(web.Application):
def __init__(self,loop):
self._loop=loop
super(Application,self).__init__(loop=loop,middlewares=Middleware.allmiddlewares())
@asyncio.coroutine
def run(self,addr='127.0.0.1',port='8000'):
Template.init(self)
Route.register_route(self)
pool=yield from create_pool(self._loop)  # NOTE: create_pool is neither imported nor defined in this commit
srv=yield from self._loop.create_server(self.make_handler(),addr,port)
logging.info("server start at http://%s:%s"%(addr,port))
return srv
|
<commit_before><commit_msg>Add Application class,for init all self write module<commit_after># -*- coding:utf-8 -*-
import os,time,asyncio,json
import logging
logging.basicConfig(level=logging.ERROR)
try:
from aiohttp import web
except ImportError:
logging.error("Can't import module aiohttp")
from tools.log import Log
from tools.httptools import Middleware,Route,Template
from tools.config import Config
class Application(web.Application):
def __init__(self,loop):
self._loop=loop
super(Application,self).__init__(loop=loop,middlewares=Middleware.allmiddlewares())
@asyncio.coroutine
def run(self,addr='127.0.0.1',port='8000'):
Template.init(self)
Route.register_route(self)
pool=yield from create_pool(self._loop)
srv=yield from self._loop.create_server(self.make_handler(),addr,port)
logging.info("server start at http://%s:%s"%(addr,port)
return srv
|
|
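The row above wires aiohttp into a custom Application class; a minimal driver sketch may help read it. Everything here is an assumption layered on the commit: the import path is inferred from the row's file path, and run() will stop at the create_pool() call unless that helper exists elsewhere in the project.

import asyncio
from tools.application import Application  # path inferred from the row's old_file/new_file

loop = asyncio.get_event_loop()
app = Application(loop)
# run() is an old-style @asyncio.coroutine, so the loop can drive it directly;
# note it calls create_pool(), which this commit neither imports nor defines
srv = loop.run_until_complete(app.run('127.0.0.1', '8000'))
loop.run_forever()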
cbb6bee9622f5a899892a3fdf05368ac3665ff06
|
zues/migrations/0002_change_managers.py
|
zues/migrations/0002_change_managers.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import zues.utils
class Migration(migrations.Migration):
dependencies = [
('zues', '0001_initial'),
]
operations = [
migrations.AlterModelManagers(
name='actuelepolitiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='amendement',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='categorie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='hrwijziging',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='login',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='organimo',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='politiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='resolutie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='settings',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
]
|
Add migration to change managers
|
Add migration to change managers
|
Python
|
mit
|
jonge-democraten/zues,jonge-democraten/zues,jonge-democraten/zues
|
Add migration to change managers
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import zues.utils
class Migration(migrations.Migration):
dependencies = [
('zues', '0001_initial'),
]
operations = [
migrations.AlterModelManagers(
name='actuelepolitiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='amendement',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='categorie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='hrwijziging',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='login',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='organimo',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='politiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='resolutie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='settings',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
]
|
<commit_before><commit_msg>Add migration to change managers<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import zues.utils
class Migration(migrations.Migration):
dependencies = [
('zues', '0001_initial'),
]
operations = [
migrations.AlterModelManagers(
name='actuelepolitiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='amendement',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='categorie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='hrwijziging',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='login',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='organimo',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='politiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='resolutie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='settings',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
]
|
Add migration to change managers# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import zues.utils
class Migration(migrations.Migration):
dependencies = [
('zues', '0001_initial'),
]
operations = [
migrations.AlterModelManagers(
name='actuelepolitiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='amendement',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='categorie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='hrwijziging',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='login',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='organimo',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='politiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='resolutie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='settings',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
]
|
<commit_before><commit_msg>Add migration to change managers<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import zues.utils
class Migration(migrations.Migration):
dependencies = [
('zues', '0001_initial'),
]
operations = [
migrations.AlterModelManagers(
name='actuelepolitiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='amendement',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='categorie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='hrwijziging',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='login',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='organimo',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='politiekemotie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='resolutie',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
migrations.AlterModelManagers(
name='settings',
managers=[
('objects', zues.utils.CurrentSiteManager()),
],
),
]
|
|
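For readers unfamiliar with AlterModelManagers: it only records manager state in the migration graph. A hedged sketch of the model-side declaration it mirrors, assuming zues.utils.CurrentSiteManager behaves like Django's own django.contrib.sites.managers.CurrentSiteManager (filtering querysets to the current Site); the field names are hypothetical:

from django.db import models
import zues.utils

class PolitiekeMotie(models.Model):
    titel = models.CharField(max_length=200)  # hypothetical field
    site = models.ForeignKey('sites.Site', on_delete=models.CASCADE)  # CurrentSiteManager keys off this
    objects = zues.utils.CurrentSiteManager()  # the manager state this migration tracks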
d1009f5de4a8e9a97686d36f26d60255ffffea42
|
anytownlib/kavrayskiy.py
|
anytownlib/kavrayskiy.py
|
"""Functions that produce a Kavrayskiy VII projection map."""
import math
def coords_to_kavrayskiy(coords):
"""Convert geographical coordinates to Kavrayskiy VII coordinates.
A Kavrayskiy VII map is defined with the following dimensions:
- Height: pi units
- Width: sqrt(3) * pi units
"""
# convert degrees to radians
lat, lng = map(lambda deg: deg * math.pi / 180, coords)
x = (3 * lng / 2) * math.sqrt((1 / 3.) - (lat / math.pi)**2)
y = lat
return (x, y)
|
Add Kavrayskiy VII utils lib
|
Add Kavrayskiy VII utils lib
|
Python
|
mit
|
andrewyang96/AnytownMapper,andrewyang96/AnytownMapper,andrewyang96/AnytownMapper
|
Add Kavrayskiy VII utils lib
|
"""Functions that produce a Kavrayskiy VII projection map."""
import math
def coords_to_kavrayskiy(coords):
"""Convert geographical coordinates to Kavrayskiy VII coordinates.
A Kavrayskiy VII map is defined with the following dimensions:
- Height: pi units
- Width: sqrt(3) * pi units
"""
# convert degrees to radians
lat, lng = map(lambda deg: deg * math.pi / 180, coords)
x = (3 * lng / 2) * math.sqrt((1 / 3.) - (lat / math.pi)**2)
y = lat
return (x, y)
|
<commit_before><commit_msg>Add Kavrayskiy VII utils lib<commit_after>
|
"""Functions that produce a Kavrayskiy VII projection map."""
import math
def coords_to_kavrayskiy(coords):
"""Convert geographical coordinates to Kavrayskiy VII coordinates.
A Kavrayskiy VII map is defined with the following dimensions:
- Height: pi units
- Width: sqrt(3) * pi units
"""
# convert degrees to radians
lat, lng = map(lambda deg: deg * math.pi / 180, coords)
x = (3 * lng / 2) * math.sqrt((1 / 3.) - (lat / math.pi)**2)
y = lat
return (x, y)
|
Add Kavrayskiy VII utils lib"""Functions that produce a Kavrayskiy VII projection map."""
import math
def coords_to_kavrayskiy(coords):
"""Convert geographical coordinates to Kavrayskiy VII coordinates.
A Kavrayskiy VII map is defined with the following dimensions:
- Height: pi units
- Width: sqrt(3) * pi units
"""
# convert degrees to radians
lat, lng = map(lambda deg: deg * math.pi / 180, coords)
x = (3 * lng / 2) * math.sqrt((1 / 3.) - (lat / math.pi)**2)
y = lat
return (x, y)
|
<commit_before><commit_msg>Add Kavrayskiy VII utils lib<commit_after>"""Functions that produce a Kavrayskiy VII projection map."""
import math
def coords_to_kavrayskiy(coords):
"""Convert geographical coordinates to Kavrayskiy VII coordinates.
A Kavrayskiy VII map is defined with the following dimensions:
- Height: pi units
- Width: sqrt(3) * pi units
"""
# convert degrees to radians
lat, lng = map(lambda deg: deg * math.pi / 180, coords)
x = (3 * lng / 2) * math.sqrt((1 / 3.) - (lat / math.pi)**2)
y = lat
return (x, y)
|
|
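A quick numeric check of the projection formula, as a hypothetical usage (the coordinates and expected output are mine, not the dataset's):

from anytownlib.kavrayskiy import coords_to_kavrayskiy  # module path as in the row

# (latitude, longitude) in degrees, roughly Rome
x, y = coords_to_kavrayskiy((41.9, 12.5))
# y is simply the latitude in radians; the sqrt term shrinks x toward the poles,
# which is what keeps the map sqrt(3)*pi units wide by pi units tall
print(x, y)  # -> approximately 0.173 0.731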
fc253535f2ef3cc256b8dd6912b65ac136eafb9c
|
heat/tests/functional/test_WordPress_Single_Instance_With_HA.py
|
heat/tests/functional/test_WordPress_Single_Instance_With_HA.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
#
import time
import util
import nose
from nose.plugins.attrib import attr
import unittest
@attr(speed='slow')
@attr(tag=['func', 'wordpress', 'HA'])
class HaFunctionalTest(unittest.TestCase):
def setUp(self):
template = 'WordPress_Single_Instance_With_HA.template'
self.func_utils = func_utils = util.FuncUtils()  # keep a reference for use outside setUp
func_utils.prepare_jeos('F17', 'x86_64', 'cfntools')
func_utils.create_stack(template, 'F17')
func_utils.check_cfntools()
func_utils.wait_for_provisioning()
func_utils.check_user_data(template)
self.ssh = func_utils.get_ssh_client()
def service_is_running(self, name):
stdin, stdout, stderr = \
self.ssh.exec_command('sudo service %s status' % name)
lines = stdout.readlines()
for line in lines:
if 'Active: active (running)' in line:
return True
return False
def test_instance(self):
# ensure wordpress was installed
wp_file = '/etc/wordpress/wp-config.php'
stdin, stdout, sterr = self.ssh.exec_command('ls ' + wp_file)
result = stdout.readlines().pop().rstrip()
self.assertEqual(result, wp_file)
print "Wordpress installation detected"
# check the httpd service is running
self.assertTrue(self.service_is_running('httpd'))
# kill httpd
self.ssh.exec_command('sudo service httpd stop')
# check that httpd service recovers
# should take less than 60 seconds, but no worse than 70 seconds
tries = 0
while not self.service_is_running('httpd'):
tries += 1
self.assertTrue(tries < 8)
time.sleep(10)
self.func_utils.cleanup()
|
Add functional test for HA template
|
Add functional test for HA template
Change-Id: I6d3304b807492e7041264402d161365447fa6ce1
Signed-off-by: Angus Salkeld <86b65304d27d8de73dd7d624c33df7e088f8d94b@redhat.com>
|
Python
|
apache-2.0
|
takeshineshiro/heat,cryptickp/heat,dims/heat,pratikmallya/heat,varunarya10/heat,jasondunsmore/heat,steveb/heat,rickerc/heat_audit,dims/heat,varunarya10/heat,citrix-openstack-build/heat,jasondunsmore/heat,maestro-hybrid-cloud/heat,miguelgrinberg/heat,pratikmallya/heat,cwolferh/heat-scratch,gonzolino/heat,dragorosson/heat,cwolferh/heat-scratch,pshchelo/heat,citrix-openstack-build/heat,rh-s/heat,srznew/heat,dragorosson/heat,redhat-openstack/heat,noironetworks/heat,JioCloud/heat,rh-s/heat,pshchelo/heat,maestro-hybrid-cloud/heat,ntt-sic/heat,Triv90/Heat,cryptickp/heat,NeCTAR-RC/heat,NeCTAR-RC/heat,JioCloud/heat,gonzolino/heat,Triv90/Heat,takeshineshiro/heat,steveb/heat,rdo-management/heat,ntt-sic/heat,srznew/heat,rickerc/heat_audit,openstack/heat,miguelgrinberg/heat,rdo-management/heat,openstack/heat,Triv90/Heat,noironetworks/heat,redhat-openstack/heat
|
Add functional test for HA template
Change-Id: I6d3304b807492e7041264402d161365447fa6ce1
Signed-off-by: Angus Salkeld <86b65304d27d8de73dd7d624c33df7e088f8d94b@redhat.com>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
#
import time
import util
import nose
from nose.plugins.attrib import attr
import unittest
@attr(speed='slow')
@attr(tag=['func', 'wordpress', 'HA'])
class HaFunctionalTest(unittest.TestCase):
def setUp(self):
template = 'WordPress_Single_Instance_With_HA.template'
self.func_utils = func_utils = util.FuncUtils()  # keep a reference for use outside setUp
func_utils.prepare_jeos('F17', 'x86_64', 'cfntools')
func_utils.create_stack(template, 'F17')
func_utils.check_cfntools()
func_utils.wait_for_provisioning()
func_utils.check_user_data(template)
self.ssh = func_utils.get_ssh_client()
def service_is_running(self, name):
stdin, stdout, stderr = \
self.ssh.exec_command('sudo service %s status' % name)
lines = stdout.readlines()
for line in lines:
if 'Active: active (running)' in line:
return True
return False
def test_instance(self):
# ensure wordpress was installed
wp_file = '/etc/wordpress/wp-config.php'
stdin, stdout, sterr = self.ssh.exec_command('ls ' + wp_file)
result = stdout.readlines().pop().rstrip()
self.assertEqual(result, wp_file)
print "Wordpress installation detected"
# check the httpd service is running
self.assertTrue(self.service_is_running('httpd'))
# kill httpd
self.ssh.exec_command('sudo service httpd stop')
# check that httpd service recovers
# should take less than 60 seconds, but no worse than 70 seconds
tries = 0
while not self.service_is_running('httpd'):
tries += 1
self.assertTrue(tries < 8)
time.sleep(10)
self.func_utils.cleanup()
|
<commit_before><commit_msg>Add functional test for HA template
Change-Id: I6d3304b807492e7041264402d161365447fa6ce1
Signed-off-by: Angus Salkeld <86b65304d27d8de73dd7d624c33df7e088f8d94b@redhat.com><commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
#
import time
import util
import nose
from nose.plugins.attrib import attr
import unittest
@attr(speed='slow')
@attr(tag=['func', 'wordpress', 'HA'])
class HaFunctionalTest(unittest.TestCase):
def setUp(self):
template = 'WordPress_Single_Instance_With_HA.template'
self.func_utils = func_utils = util.FuncUtils()  # keep a reference for use outside setUp
func_utils.prepare_jeos('F17', 'x86_64', 'cfntools')
func_utils.create_stack(template, 'F17')
func_utils.check_cfntools()
func_utils.wait_for_provisioning()
func_utils.check_user_data(template)
self.ssh = func_utils.get_ssh_client()
def service_is_running(self, name):
stdin, stdout, stderr = \
self.ssh.exec_command('sudo service %s status' % name)
lines = stdout.readlines()
for line in lines:
if 'Active: active (running)' in line:
return True
return False
def test_instance(self):
# ensure wordpress was installed
wp_file = '/etc/wordpress/wp-config.php'
stdin, stdout, sterr = self.ssh.exec_command('ls ' + wp_file)
result = stdout.readlines().pop().rstrip()
self.assertEqual(result, wp_file)
print "Wordpress installation detected"
# check the httpd service is running
self.assertTrue(self.service_is_running('httpd'))
# kill httpd
self.ssh.exec_command('sudo service httpd stop')
# check that httpd service recovers
# should take less than 60 seconds, but no worse than 70 seconds
tries = 0
while not self.service_is_running('httpd'):
tries += 1
self.assertTrue(tries < 8)
time.sleep(10)
self.func_utils.cleanup()
|
Add functional test for HA template
Change-Id: I6d3304b807492e7041264402d161365447fa6ce1
Signed-off-by: Angus Salkeld <86b65304d27d8de73dd7d624c33df7e088f8d94b@redhat.com># vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
#
import time
import util
import nose
from nose.plugins.attrib import attr
import unittest
@attr(speed='slow')
@attr(tag=['func', 'wordpress', 'HA'])
class HaFunctionalTest(unittest.TestCase):
def setUp(self):
template = 'WordPress_Single_Instance_With_HA.template'
self.func_utils = func_utils = util.FuncUtils()  # keep a reference for use outside setUp
func_utils.prepare_jeos('F17', 'x86_64', 'cfntools')
func_utils.create_stack(template, 'F17')
func_utils.check_cfntools()
func_utils.wait_for_provisioning()
func_utils.check_user_data(template)
self.ssh = func_utils.get_ssh_client()
def service_is_running(self, name):
stdin, stdout, stderr = \
self.ssh.exec_command('sudo service %s status' % name)
lines = stdout.readlines()
for line in lines:
if 'Active: active (running)' in line:
return True
return False
def test_instance(self):
# ensure wordpress was installed
wp_file = '/etc/wordpress/wp-config.php'
stdin, stdout, sterr = self.ssh.exec_command('ls ' + wp_file)
result = stdout.readlines().pop().rstrip()
self.assertEqual(result, wp_file)
print "Wordpress installation detected"
# check the httpd service is running
self.assertTrue(self.service_is_running('httpd'))
# kill httpd
self.ssh.exec_command('sudo service httpd stop')
# check that httpd service recovers
# should take less than 60 seconds, but no worse than 70 seconds
tries = 0
while not self.service_is_running('httpd'):
tries += 1
self.assertTrue(tries < 8)
time.sleep(10)
self.func_utils.cleanup()
|
<commit_before><commit_msg>Add functional test for HA template
Change-Id: I6d3304b807492e7041264402d161365447fa6ce1
Signed-off-by: Angus Salkeld <86b65304d27d8de73dd7d624c33df7e088f8d94b@redhat.com><commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
#
import time
import util
import nose
from nose.plugins.attrib import attr
import unittest
@attr(speed='slow')
@attr(tag=['func', 'wordpress', 'HA'])
class HaFunctionalTest(unittest.TestCase):
def setUp(self):
template = 'WordPress_Single_Instance_With_HA.template'
self.func_utils = func_utils = util.FuncUtils()  # keep a reference for use outside setUp
func_utils.prepare_jeos('F17', 'x86_64', 'cfntools')
func_utils.create_stack(template, 'F17')
func_utils.check_cfntools()
func_utils.wait_for_provisioning()
func_utils.check_user_data(template)
self.ssh = func_utils.get_ssh_client()
def service_is_running(self, name):
stdin, stdout, stderr = \
self.ssh.exec_command('sudo service %s status' % name)
lines = stdout.readlines()
for line in lines:
if 'Active: active (running)' in line:
return True
return False
def test_instance(self):
# ensure wordpress was installed
wp_file = '/etc/wordpress/wp-config.php'
stdin, stdout, sterr = self.ssh.exec_command('ls ' + wp_file)
result = stdout.readlines().pop().rstrip()
self.assertEqual(result, wp_file)
print "Wordpress installation detected"
# check the httpd service is running
self.assertTrue(self.service_is_running('httpd'))
# kill httpd
self.ssh.exec_command('sudo service httpd stop')
# check that httpd service recovers
# should take less than 60 seconds, but no worse than 70 seconds
tries = 0
while not self.service_is_running('httpd'):
tries += 1
self.assertTrue(tries < 8)
time.sleep(10)
self.func_utils.cleanup()
|
|
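Since the test class above is tagged through nose's attrib plugin, a plausible way to select just this test (path as in the row, stock nose flags) would be: nosetests -a tag=HA heat/tests/functional/test_WordPress_Single_Instance_With_HA.py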
55fe2230ad3d4f9c4957600b0ff49ce23575017a
|
senato/spiders/zan_spider.py
|
senato/spiders/zan_spider.py
|
# -*- coding: utf-8 -*-
import os
import re
import scrapy
AMENDMENT_XPATH = '//dl[@class="emendamenti"]/dt/a/@href'
AMENDMENT_URL = '/leg/18/BGT/Testi/Emend/0{parent}/0{_id}.akn'
HREF_REGEX = r'id=(?P<_id>\d+)&idoggetto=(?P<parent>\d+)'
class ZanSpider(scrapy.Spider):
name = 'zan'
allowed_domains = ['senato.it']
start_urls = ['http://www.senato.it/leg/18/BGT/Schede/Ddliter/testi/53457_testi.htm']
def parse(self, response):
for href in response.xpath(AMENDMENT_XPATH).extract():
match = re.search(HREF_REGEX, href)
if match:
_id = match.group('_id')
parent = match.group('parent')
relative_url = AMENDMENT_URL.format(_id=_id, parent=parent)
absolute_url = response.urljoin(relative_url)
yield scrapy.Request(absolute_url, callback=self.download_amendment)
def download_amendment(self, response):
relative_filename = response.url.split('/')[-1]
absolute_filename = os.path.join(os.getcwd(), 'data', self.name, relative_filename)
with open(absolute_filename, 'wb') as f:
f.write(response.body)
|
Add spider for the amendments to the Zan DDL
|
Add spider for the amendments to the Zan DDL
|
Python
|
mit
|
jacquerie/senato.py
|
Add spider for the amendments to the Zan DDL
|
# -*- coding: utf-8 -*-
import os
import re
import scrapy
AMENDMENT_XPATH = '//dl[@class="emendamenti"]/dt/a/@href'
AMENDMENT_URL = '/leg/18/BGT/Testi/Emend/0{parent}/0{_id}.akn'
HREF_REGEX = r'id=(?P<_id>\d+)&idoggetto=(?P<parent>\d+)'
class ZanSpider(scrapy.Spider):
name = 'zan'
allowed_domains = ['senato.it']
start_urls = ['http://www.senato.it/leg/18/BGT/Schede/Ddliter/testi/53457_testi.htm']
def parse(self, response):
for href in response.xpath(AMENDMENT_XPATH).extract():
match = re.search(HREF_REGEX, href)
if match:
_id = match.group('_id')
parent = match.group('parent')
relative_url = AMENDMENT_URL.format(_id=_id, parent=parent)
absolute_url = response.urljoin(relative_url)
yield scrapy.Request(absolute_url, callback=self.download_amendment)
def download_amendment(self, response):
relative_filename = response.url.split('/')[-1]
absolute_filename = os.path.join(os.getcwd(), 'data', self.name, relative_filename)
with open(absolute_filename, 'wb') as f:
f.write(response.body)
|
<commit_before><commit_msg>Add spider for the amendments to the Zan DDL<commit_after>
|
# -*- coding: utf-8 -*-
import os
import re
import scrapy
AMENDMENT_XPATH = '//dl[@class="emendamenti"]/dt/a/@href'
AMENDMENT_URL = '/leg/18/BGT/Testi/Emend/0{parent}/0{_id}.akn'
HREF_REGEX = r'id=(?P<_id>\d+)&idoggetto=(?P<parent>\d+)'
class ZanSpider(scrapy.Spider):
name = 'zan'
allowed_domains = ['senato.it']
start_urls = ['http://www.senato.it/leg/18/BGT/Schede/Ddliter/testi/53457_testi.htm']
def parse(self, response):
for href in response.xpath(AMENDMENT_XPATH).extract():
match = re.search(HREF_REGEX, href)
if match:
_id = match.group('_id')
parent = match.group('parent')
relative_url = AMENDMENT_URL.format(_id=_id, parent=parent)
absolute_url = response.urljoin(relative_url)
yield scrapy.Request(absolute_url, callback=self.download_amendment)
def download_amendment(self, response):
relative_filename = response.url.split('/')[-1]
absolute_filename = os.path.join(os.getcwd(), 'data', self.name, relative_filename)
with open(absolute_filename, 'wb') as f:
f.write(response.body)
|
Add spider for the amendments to the Zan DDL# -*- coding: utf-8 -*-
import os
import re
import scrapy
AMENDMENT_XPATH = '//dl[@class="emendamenti"]/dt/a/@href'
AMENDMENT_URL = '/leg/18/BGT/Testi/Emend/0{parent}/0{_id}.akn'
HREF_REGEX = r'id=(?P<_id>\d+)&idoggetto=(?P<parent>\d+)'
class ZanSpider(scrapy.Spider):
name = 'zan'
allowed_domains = ['senato.it']
start_urls = ['http://www.senato.it/leg/18/BGT/Schede/Ddliter/testi/53457_testi.htm']
def parse(self, response):
for href in response.xpath(AMENDMENT_XPATH).extract():
match = re.search(HREF_REGEX, href)
if match:
_id = match.group('_id')
parent = match.group('parent')
relative_url = AMENDMENT_URL.format(_id=_id, parent=parent)
absolute_url = response.urljoin(relative_url)
yield scrapy.Request(absolute_url, callback=self.download_amendment)
def download_amendment(self, response):
relative_filename = response.url.split('/')[-1]
absolute_filename = os.path.join(os.getcwd(), 'data', self.name, relative_filename)
with open(absolute_filename, 'wb') as f:
f.write(response.body)
|
<commit_before><commit_msg>Add spider for the amendments to the Zan DDL<commit_after># -*- coding: utf-8 -*-
import os
import re
import scrapy
AMENDMENT_XPATH = '//dl[@class="emendamenti"]/dt/a/@href'
AMENDMENT_URL = '/leg/18/BGT/Testi/Emend/0{parent}/0{_id}.akn'
HREF_REGEX = r'id=(?P<_id>\d+)&idoggetto=(?P<parent>\d+)'
class ZanSpider(scrapy.Spider):
name = 'zan'
allowed_domains = ['senato.it']
start_urls = ['http://www.senato.it/leg/18/BGT/Schede/Ddliter/testi/53457_testi.htm']
def parse(self, response):
for href in response.xpath(AMENDMENT_XPATH).extract():
match = re.search(HREF_REGEX, href)
if match:
_id = match.group('_id')
parent = match.group('parent')
relative_url = AMENDMENT_URL.format(_id=_id, parent=parent)
absolute_url = response.urljoin(relative_url)
yield scrapy.Request(absolute_url, callback=self.download_amendment)
def download_amendment(self, response):
relative_filename = response.url.split('/')[-1]
absolute_filename = os.path.join(os.getcwd(), 'data', self.name, relative_filename)
with open(absolute_filename, 'wb') as f:
f.write(response.body)
|
|
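To actually run the spider, a driver along these lines should work; CrawlerProcess is standard Scrapy API, while the module path and the need to pre-create data/zan are inferred from the code above:

import os
from scrapy.crawler import CrawlerProcess
from senato.spiders.zan_spider import ZanSpider  # path as in the row

# download_amendment() writes into ./data/zan but never creates it, so do that first
os.makedirs(os.path.join('data', 'zan'), exist_ok=True)

process = CrawlerProcess()
process.crawl(ZanSpider)
process.start()  # blocks until the crawl finishes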
4e3b01b3f702c60083dd94136263cea9e11fb725
|
server/daily_backup.py
|
server/daily_backup.py
|
#!/usr/bin/env python2
# coding: utf-8
'''
A daily backup script
How to use:
0. Init a borg repository using `borg init ...`
1. Fill information in this script
2. Set a daily cron job for this script
3. Sleep. :) You'll be notified once the backup finished or failed.
'''
import os
from requests import post as http_post
from sh import borg, hostname, date
# Please init before using this script
# `borg init ...`
BORG_REPOSITORY = '/your/borg/repo/name'
# Get a push code following this: https://jokerqyou.github.io/ethbot
PUSH_CODE = 'xxxxxx'
# Add the directories you want to backup
DIRECTORIES = (
'/important/data',
'/home/user/secret/data',
)
# Add the directories or patterns you want to exclude
EXCLUDES = (
'*.pyc',
'*.swp',
'/i/dont/care/about/this/data',
'/home/Ben/Music/Justin\ Bieber',
)
HOSTNAME = hostname().strip()
DATE = date('+%Y-%m-%d').strip()
def backup(*directories, **kwargs):
'''
Backup a directory using borg
'''
directories = [d for d in directories if os.path.exists(d)]
repository = '{}::{}-{}'.format(BORG_REPOSITORY, HOSTNAME, DATE)
excludes = kwargs.pop('excludes', [])
excludes = [excludes, ]\
if not isinstance(excludes, (list, tuple, ))\
else excludes
arguments = ['--stats', '--compression', 'zlib,5', repository, ]
arguments.extend(directories)
[arguments.extend(['--exclude', ex]) for ex in excludes]
borg.create(arguments)
def push_notification(s):
'''
Push a notification via Telegram bot
'''
http_post(
'https://eth-nookcloud.rhcloud.com/bot/push',
data={
'code': PUSH_CODE,
'text': s,
}
)
def prune():
'''
Prune backups to maintain 7 daily,
4 weekly and 6 monthly archives of THIS machine
'''
prefix = '{}-'.format(HOSTNAME)
borg.prune(
'-v',
BORG_REPOSITORY,
'--prefix', prefix,
'--keep-daily=7',
'--keep-weekly=4',
'--keep-monthly=6'
)
def main():
try:
backup_name = '{}-{}'.format(HOSTNAME, DATE)
backup(*DIRECTORIES, excludes=EXCLUDES)
except Exception as e:
push_notification(u'每日备份失败,错误原因:`{}`'.format(e))
else:
push_notification(
u'每日备份成功,存档名称:`{}::{}`'.format(BORG_REPOSITORY, backup_name)
)
if __name__ == '__main__':
main()
|
Add server daily backup script
|
Add server daily backup script
|
Python
|
bsd-2-clause
|
JokerQyou/toolset,JokerQyou/toolset
|
Add server daily backup script
|
#!/usr/bin/env python2
# coding: utf-8
'''
A daily backup script
How to use:
0. Init a borg repository using `borg init ...`
1. Fill information in this script
2. Set a daily cron job for this script
3. Sleep. :) You'll be notified once the backup finished or failed.
'''
import os
from requests import post as http_post
from sh import borg, hostname, date
# Please init before using this script
# `borg init ...`
BORG_REPOSITORY = '/your/borg/repo/name'
# Get a push code following this: https://jokerqyou.github.io/ethbot
PUSH_CODE = 'xxxxxx'
# Add the directories you want to backup
DIRECTORIES = (
'/important/data',
'/home/user/secret/data',
)
# Add the directories or patterns you want to exclude
EXCLUDES = (
'*.pyc',
'*.swp',
'/i/dont/care/about/this/data',
'/home/Ben/Music/Justin\ Bieber',
)
HOSTNAME = hostname().strip()
DATE = date('+%Y-%m-%d').strip()
def backup(*directories, **kwargs):
'''
Backup a directory using borg
'''
directories = [d for d in directories if os.path.exists(d)]
repository = '{}::{}-{}'.format(BORG_REPOSITORY, HOSTNAME, DATE)
excludes = kwargs.pop('excludes', [])
excludes = [excludes, ]\
if not isinstance(excludes, (list, tuple, ))\
else excludes
arguments = ['--stats', '--compression', 'zlib,5', repository, ]
arguments.extend(directories)
[arguments.extend(['--exclude', ex]) for ex in excludes]
borg.create(arguments)
def push_notification(s):
'''
Push a notification via Telegram bot
'''
http_post(
'https://eth-nookcloud.rhcloud.com/bot/push',
data={
'code': PUSH_CODE,
'text': s,
}
)
def prune():
'''
Prune backups to maintain 7 daily,
4 weekly and 6 monthly archives of THIS machine
'''
prefix = '{}-'.format(HOSTNAME)
borg.prune(
'-v',
BORG_REPOSITORY,
'--prefix', prefix,
'--keep-daily=7',
'--keep-weekly=4',
'--keep-monthly=6'
)
def main():
try:
backup_name = '{}-{}'.format(HOSTNAME, DATE)
backup(*DIRECTORIES, excludes=EXCLUDES)
except Exception as e:
push_notification(u'每日备份失败,错误原因:`{}`'.format(e))
else:
push_notification(
u'每日备份成功,存档名称:`{}::{}`'.format(BORG_REPOSITORY, backup_name)
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add server daily backup script<commit_after>
|
#!/usr/bin/env python2
# coding: utf-8
'''
A daily backup script
How to use:
0. Init a borg repository using `borg init ...`
1. Fill information in this script
2. Set a daily cron job for this script
3. Sleep. :) You'll be notified once the backup finished or failed.
'''
import os
from requests import post as http_post
from sh import borg, hostname, date
# Please init before using this script
# `borg init ...`
BORG_REPOSITORY = '/your/borg/repo/name'
# Get a push code following this: https://jokerqyou.github.io/ethbot
PUSH_CODE = 'xxxxxx'
# Add the directories you want to backup
DIRECTORIES = (
'/important/data',
'/home/user/secret/data',
)
# Add the directories or patterns you want to exclude
EXCLUDES = (
'*.pyc',
'*.swp',
'/i/dont/care/about/this/data',
'/home/Ben/Music/Justin\ Bieber',
)
HOSTNAME = hostname().strip()
DATE = date('+%Y-%m-%d').strip()
def backup(*directories, **kwargs):
'''
Backup a directory using borg
'''
directories = [d for d in directories if os.path.exists(d)]
repository = '{}::{}-{}'.format(BORG_REPOSITORY, HOSTNAME, DATE)
excludes = kwargs.pop('excludes', [])
excludes = [excludes, ]\
if not isinstance(excludes, (list, tuple, ))\
else excludes
arguments = ['--stats', '--compression', 'zlib,5', repository, ]
arguments.extend(directories)
[arguments.extend(['--exclude', ex]) for ex in excludes]
borg.create(arguments)
def push_notification(s):
'''
Push a notification via Telegram bot
'''
http_post(
'https://eth-nookcloud.rhcloud.com/bot/push',
data={
'code': PUSH_CODE,
'text': s,
}
)
def prune():
'''
Prune backups to maintain 7 daily,
4 weekly and 6 monthly archives of THIS machine
'''
prefix = '{}-'.format(HOSTNAME)
borg.prune(
'-v',
BORG_REPOSITORY,
'--prefix', prefix,
'--keep-daily=7',
'--keep-weekly=4',
'--keep-monthly=6'
)
def main():
try:
backup_name = '{}-{}'.format(HOSTNAME, DATE)
backup(*DIRECTORIES, excludes=EXCLUDES)
except Exception as e:
push_notification(u'每日备份失败,错误原因:`{}`'.format(e))
else:
push_notification(
u'每日备份成功,存档名称:`{}::{}`'.format(BORG_REPOSITORY, backup_name)
)
if __name__ == '__main__':
main()
|
Add server daily backup script#!/usr/bin/env python2
# coding: utf-8
'''
A daily backup script
How to use:
0. Init a borg repository using `borg init ...`
1. Fill information in this script
2. Set a daily cron job for this script
3. Sleep. :) You'll be notified once the backup finished or failed.
'''
import os
from requests import post as http_post
from sh import borg, hostname, date
# Please init before using this script
# `borg init ...`
BORG_REPOSITORY = '/your/borg/repo/name'
# Get a push code following this: https://jokerqyou.github.io/ethbot
PUSH_CODE = 'xxxxxx'
# Add the directories you want to backup
DIRECTORIES = (
'/important/data',
'/home/user/secret/data',
)
# Add the directories or patterns you want to exclude
EXCLUDES = (
'*.pyc',
'*.swp',
'/i/dont/care/about/this/data',
'/home/Ben/Music/Justin\ Bieber',
)
HOSTNAME = hostname().strip()
DATE = date('+%Y-%m-%d').strip()
def backup(*directories, **kwargs):
'''
Backup a directory using borg
'''
directories = [d for d in directories if os.path.exists(d)]
repository = '{}::{}-{}'.format(BORG_REPOSITORY, HOSTNAME, DATE)
excludes = kwargs.pop('excludes', [])
excludes = [excludes, ]\
if not isinstance(excludes, (list, tuple, ))\
else excludes
arguments = ['--stats', '--compression', 'zlib,5', repository, ]
arguments.extend(directories)
[arguments.extend(['--exclude', ex]) for ex in excludes]
borg.create(arguments)
def push_notification(s):
'''
Push a notification via Telegram bot
'''
http_post(
'https://eth-nookcloud.rhcloud.com/bot/push',
data={
'code': PUSH_CODE,
'text': s,
}
)
def prune():
'''
Prune backups to maintain 7 daily,
4 weekly and 6 monthly archives of THIS machine
'''
prefix = '{}-'.format(HOSTNAME)
borg.prune(
'-v',
BORG_REPOSITORY,
'--prefix', prefix,
'--keep-daily=7',
'--keep-weekly=4',
'--keep-monthly=6'
)
def main():
try:
backup_name = '{}-{}'.format(HOSTNAME, DATE)
backup(*DIRECTORIES, excludes=EXCLUDES)
except Exception as e:
push_notification(u'每日备份失败,错误原因:`{}`'.format(e))
else:
push_notification(
u'每日备份成功,存档名称:`{}::{}`'.format(BORG_REPOSITORY, backup_name)
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add server daily backup script<commit_after>#!/usr/bin/env python2
# coding: utf-8
'''
A daily backup script
How to use:
0. Init a borg repository using `borg init ...`
1. Fill information in this script
2. Set a daily cron job for this script
3. Sleep. :) You'll be notified once the backup finished or failed.
'''
import os
from requests import post as http_post
from sh import borg, hostname, date
# Please init before using this script
# `borg init ...`
BORG_REPOSITORY = '/your/borg/repo/name'
# Get a push code following this: https://jokerqyou.github.io/ethbot
PUSH_CODE = 'xxxxxx'
# Add the directories you want to backup
DIRECTORIES = (
'/important/data',
'/home/user/secret/data',
)
# Add the directories or patterns you want to exclude
EXCLUDES = (
'*.pyc',
'*.swp',
'/i/dont/care/about/this/data',
'/home/Ben/Music/Justin\ Bieber',
)
HOSTNAME = hostname().strip()
DATE = date('+%Y-%m-%d').strip()
def backup(*directories, **kwargs):
'''
Backup a directory using borg
'''
directories = [d for d in directories if os.path.exists(d)]
repository = '{}::{}-{}'.format(BORG_REPOSITORY, HOSTNAME, DATE)
excludes = kwargs.pop('excludes', [])
excludes = [excludes, ]\
if not isinstance(excludes, (list, tuple, ))\
else excludes
arguments = ['--stats', '--compression', 'zlib,5', repository, ]
arguments.extend(directories)
[arguments.extend(['--exclude', ex]) for ex in excludes]
borg.create(arguments)
def push_notification(s):
'''
Push a notification via Telegram bot
'''
http_post(
'https://eth-nookcloud.rhcloud.com/bot/push',
data={
'code': PUSH_CODE,
'text': s,
}
)
def prune():
'''
Prune backups to maintain 7 daily,
4 weekly and 6 monthly archives of THIS machine
'''
prefix = '{}-'.format(HOSTNAME)
borg.prune(
'-v',
BORG_REPOSITORY,
'--prefix', prefix,
'--keep-daily=7',
'--keep-weekly=4',
'--keep-monthly=6'
)
def main():
try:
backup_name = '{}-{}'.format(HOSTNAME, DATE)
backup(*DIRECTORIES, excludes=EXCLUDES)
except Exception as e:
push_notification(u'每日备份失败,错误原因:`{}`'.format(e))
else:
push_notification(
u'每日备份成功,存档名称:`{}::{}`'.format(BORG_REPOSITORY, backup_name)
)
if __name__ == '__main__':
main()
|
|
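One wiring detail worth noting in the row above: prune() is defined but main() never calls it, so the 7-daily/4-weekly/6-monthly retention is not actually enforced by the script as committed. A hedged variant (mine, not the author's) that closes that loop, with a hypothetical cron line for step 2 of the how-to:

def main():
    try:
        backup_name = '{}-{}'.format(HOSTNAME, DATE)
        backup(*DIRECTORIES, excludes=EXCLUDES)
        prune()  # enforce the retention policy after each successful backup
    except Exception as e:
        push_notification(u'Daily backup failed: `{}`'.format(e))
    else:
        push_notification(
            u'Daily backup done, archive: `{}::{}`'.format(BORG_REPOSITORY, backup_name)
        )

# crontab entry, path hypothetical: run every day at 03:30
# 30 3 * * * /usr/bin/python2 /opt/toolset/server/daily_backup.py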
299bfcd6cff71f847023eeb3bcd4a8296457f2d9
|
scripts/merge_abr_files.py
|
scripts/merge_abr_files.py
|
import os.path
from cochlear.util import merge_abr_files
if __name__ == '__main__':
import argparse
description = 'Merge ABR files'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('files', nargs='+', help='ABR files to merge')
parser.add_argument('-o', '--out', nargs='?', help='Output filename')
args = parser.parse_args()
if args.out is None:
base, ext = os.path.splitext(args.files[0])
args.out = '{} (merged){}'.format(base, ext)
merge_abr_files(args.files, args.out)
|
Add script to merge ABR files
|
Add script to merge ABR files
|
Python
|
bsd-3-clause
|
bburan/cochlear
|
Add script to merge ABR files
|
import os.path
from cochlear.util import merge_abr_files
if __name__ == '__main__':
import argparse
description = 'Merge ABR files'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('files', nargs='+', help='ABR files to merge')
parser.add_argument('-o', '--out', nargs='?', help='Output filename')
args = parser.parse_args()
if args.out is None:
base, ext = os.path.splitext(args.files[0])
args.out = '{} (merged){}'.format(base, ext)
merge_abr_files(args.files, args.out)
|
<commit_before><commit_msg>Add script to merge ABR files<commit_after>
|
import os.path
from cochlear.util import merge_abr_files
if __name__ == '__main__':
import argparse
description = 'Merge ABR files'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('files', nargs='+', help='ABR files to merge')
parser.add_argument('-o', '--out', nargs='?', help='Output filename')
args = parser.parse_args()
if args.out is None:
base, ext = os.path.splitext(args.files[0])
args.out = '{} (merged){}'.format(base, ext)
merge_abr_files(args.files, args.out)
|
Add script to merge ABR filesimport os.path
from cochlear.util import merge_abr_files
if __name__ == '__main__':
import argparse
description = 'Merge ABR files'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('files', nargs='+', help='ABR files to merge')
parser.add_argument('-o', '--out', nargs='?', help='Output filename')
args = parser.parse_args()
if args.out is None:
base, ext = os.path.splitext(args.files[0])
args.out = '{} (merged){}'.format(base, ext)
merge_abr_files(args.files, args.out)
|
<commit_before><commit_msg>Add script to merge ABR files<commit_after>import os.path
from cochlear.util import merge_abr_files
if __name__ == '__main__':
import argparse
description = 'Merge ABR files'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('files', nargs='+', help='ABR files to merge')
parser.add_argument('-o', '--out', nargs='?', help='Output filename')
args = parser.parse_args()
if args.out is None:
base, ext = os.path.splitext(args.files[0])
args.out = '{} (merged){}'.format(base, ext)
merge_abr_files(args.files, args.out)
|
|
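A hypothetical invocation (filenames are mine): python merge_abr_files.py a.abr b.abr writes 'a (merged).abr' next to the first input, because --out defaults to the first filename with ' (merged)' spliced in before the extension. The same merge driven from Python:

from cochlear.util import merge_abr_files  # as imported by the script

merge_abr_files(['a.abr', 'b.abr'], 'a (merged).abr')  # filenames hypothetical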
4631a2192b24675f61f4eec5ab68e273ea47cca8
|
sklearn/svm/sparse/base.py
|
sklearn/svm/sparse/base.py
|
import numpy as np
import scipy.sparse
from abc import ABCMeta, abstractmethod
from ..base import BaseLibSVM
class SparseBaseLibSVM(BaseLibSVM):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, impl, kernel, degree, gamma, coef0,
tol, C, nu, epsilon, shrinking, probability, cache_size,
class_weight, verbose):
assert kernel in self._sparse_kernels, \
"kernel should be one of %s, "\
"%s was given." % (self._kernel_types, kernel)
super(SparseBaseLibSVM, self).__init__(impl, kernel, degree, gamma,
coef0, tol, C, nu, epsilon, shrinking, probability, cache_size,
True, class_weight, verbose)
def fit(self, X, y, sample_weight=None):
X = scipy.sparse.csr_matrix(X, dtype=np.float64)
return super(SparseBaseLibSVM, self).fit(X, y, sample_weight)
|
import numpy as np
import scipy.sparse
from abc import ABCMeta, abstractmethod
from ..base import BaseLibSVM
class SparseBaseLibSVM(BaseLibSVM):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, impl, kernel, degree, gamma, coef0,
tol, C, nu, epsilon, shrinking, probability, cache_size,
class_weight, verbose):
assert kernel in self._sparse_kernels, \
"kernel should be one of %s, "\
"%s was given." % (self._kernel_types, kernel)
super(SparseBaseLibSVM, self).__init__(impl, kernel, degree, gamma,
coef0, tol, C, nu, epsilon, shrinking, probability, cache_size,
True, class_weight, verbose)
def fit(self, X, y, sample_weight=None):
X = scipy.sparse.csr_matrix(X, dtype=np.float64)
return super(SparseBaseLibSVM, self).fit(X, y,
sample_weight=sample_weight)
|
FIX sparse OneClassSVM was using the wrong parameter
|
FIX sparse OneClassSVM was using the wrong parameter
|
Python
|
bsd-3-clause
|
rishikksh20/scikit-learn,kmike/scikit-learn,JPFrancoia/scikit-learn,B3AU/waveTree,themrmax/scikit-learn,vybstat/scikit-learn,kaichogami/scikit-learn,IndraVikas/scikit-learn,walterreade/scikit-learn,tosolveit/scikit-learn,macks22/scikit-learn,AlexRobson/scikit-learn,heli522/scikit-learn,robbymeals/scikit-learn,xuewei4d/scikit-learn,zhenv5/scikit-learn,theoryno3/scikit-learn,themrmax/scikit-learn,alexsavio/scikit-learn,Srisai85/scikit-learn,wanggang3333/scikit-learn,walterreade/scikit-learn,rohanp/scikit-learn,loli/semisupervisedforests,luo66/scikit-learn,eg-zhang/scikit-learn,mjudsp/Tsallis,adamgreenhall/scikit-learn,jlegendary/scikit-learn,saiwing-yeung/scikit-learn,liangz0707/scikit-learn,hlin117/scikit-learn,shangwuhencc/scikit-learn,xubenben/scikit-learn,sergeyf/scikit-learn,dingocuster/scikit-learn,billy-inn/scikit-learn,IssamLaradji/scikit-learn,dsullivan7/scikit-learn,moutai/scikit-learn,toastedcornflakes/scikit-learn,xwolf12/scikit-learn,smartscheduling/scikit-learn-categorical-tree,NunoEdgarGub1/scikit-learn,kagayakidan/scikit-learn,phdowling/scikit-learn,fredhusser/scikit-learn,wzbozon/scikit-learn,xubenben/scikit-learn,jereze/scikit-learn,DonBeo/scikit-learn,bnaul/scikit-learn,jmschrei/scikit-learn,Garrett-R/scikit-learn,sumspr/scikit-learn,zaxtax/scikit-learn,CforED/Machine-Learning,Adai0808/scikit-learn,costypetrisor/scikit-learn,appapantula/scikit-learn,arabenjamin/scikit-learn,Adai0808/scikit-learn,alexsavio/scikit-learn,nrhine1/scikit-learn,nhejazi/scikit-learn,samuel1208/scikit-learn,plissonf/scikit-learn,ClimbsRocks/scikit-learn,q1ang/scikit-learn,hlin117/scikit-learn,vivekmishra1991/scikit-learn,IssamLaradji/scikit-learn,DonBeo/scikit-learn,spallavolu/scikit-learn,xwolf12/scikit-learn,fyffyt/scikit-learn,macks22/scikit-learn,vshtanko/scikit-learn,gclenaghan/scikit-learn,herilalaina/scikit-learn,Myasuka/scikit-learn,hsuantien/scikit-learn,hdmetor/scikit-learn,ilyes14/scikit-learn,stylianos-kampakis/scikit-learn,ngoix/OCRF,marcocaccin/scikit-learn,fbagirov/scikit-learn,sanketloke/scikit-learn,0x0all/scikit-learn,ningchi/scikit-learn,alexeyum/scikit-learn,PatrickChrist/scikit-learn,ilo10/scikit-learn,LiaoPan/scikit-learn,yyjiang/scikit-learn,YinongLong/scikit-learn,justincassidy/scikit-learn,pv/scikit-learn,carrillo/scikit-learn,abhishekkrthakur/scikit-learn,chrsrds/scikit-learn,shikhardb/scikit-learn,hainm/scikit-learn,xiaoxiamii/scikit-learn,djgagne/scikit-learn,devanshdalal/scikit-learn,mwv/scikit-learn,rrohan/scikit-learn,jkarnows/scikit-learn,jorge2703/scikit-learn,xwolf12/scikit-learn,thientu/scikit-learn,ilo10/scikit-learn,vibhorag/scikit-learn,Barmaley-exe/scikit-learn,amueller/scikit-learn,ndingwall/scikit-learn,jmschrei/scikit-learn,ngoix/OCRF,ogrisel/scikit-learn,chrisburr/scikit-learn,abimannans/scikit-learn,ky822/scikit-learn,nmayorov/scikit-learn,aetilley/scikit-learn,ankurankan/scikit-learn,shyamalschandra/scikit-learn,Srisai85/scikit-learn,treycausey/scikit-learn,fabianp/scikit-learn,vortex-ape/scikit-learn,RomainBrault/scikit-learn,fzalkow/scikit-learn,terkkila/scikit-learn,madjelan/scikit-learn,rishikksh20/scikit-learn,Jimmy-Morzaria/scikit-learn,frank-tancf/scikit-learn,devanshdalal/scikit-learn,JPFrancoia/scikit-learn,terkkila/scikit-learn,olologin/scikit-learn,loli/sklearn-ensembletrees,aewhatley/scikit-learn,Djabbz/scikit-learn,hsiaoyi0504/scikit-learn,shyamalschandra/scikit-learn,MartinSavc/scikit-learn,mjgrav2001/scikit-learn,xavierwu/scikit-learn,pythonvietnam/scikit-learn,raghavrv/scikit-learn,michigraber/scikit-learn,khkaminska/scikit-learn,wlamond/
scikit-learn,procoder317/scikit-learn,YinongLong/scikit-learn,abimannans/scikit-learn,mwv/scikit-learn,IshankGulati/scikit-learn,lucidfrontier45/scikit-learn,mattgiguere/scikit-learn,olologin/scikit-learn,Djabbz/scikit-learn,xuewei4d/scikit-learn,wanggang3333/scikit-learn,Aasmi/scikit-learn,rsivapr/scikit-learn,liberatorqjw/scikit-learn,vermouthmjl/scikit-learn,quheng/scikit-learn,mlyundin/scikit-learn,rvraghav93/scikit-learn,rohanp/scikit-learn,ChanderG/scikit-learn,pkruskal/scikit-learn,466152112/scikit-learn,ldirer/scikit-learn,alexsavio/scikit-learn,Barmaley-exe/scikit-learn,chrsrds/scikit-learn,mlyundin/scikit-learn,thilbern/scikit-learn,victorbergelin/scikit-learn,jaidevd/scikit-learn,scikit-learn/scikit-learn,ishanic/scikit-learn,harshaneelhg/scikit-learn,yask123/scikit-learn,gotomypc/scikit-learn,potash/scikit-learn,meduz/scikit-learn,glemaitre/scikit-learn,xubenben/scikit-learn,hlin117/scikit-learn,murali-munna/scikit-learn,PrashntS/scikit-learn,mfjb/scikit-learn,vinayak-mehta/scikit-learn,simon-pepin/scikit-learn,ephes/scikit-learn,liberatorqjw/scikit-learn,lenovor/scikit-learn,stylianos-kampakis/scikit-learn,hugobowne/scikit-learn,eg-zhang/scikit-learn,Sentient07/scikit-learn,Lawrence-Liu/scikit-learn,samuel1208/scikit-learn,nomadcube/scikit-learn,espg/scikit-learn,Achuth17/scikit-learn,abimannans/scikit-learn,huzq/scikit-learn,jmschrei/scikit-learn,AlexandreAbraham/scikit-learn,lucidfrontier45/scikit-learn,vybstat/scikit-learn,3manuek/scikit-learn,huzq/scikit-learn,arjoly/scikit-learn,alexeyum/scikit-learn,murali-munna/scikit-learn,AlexRobson/scikit-learn,aabadie/scikit-learn,jayflo/scikit-learn,f3r/scikit-learn,PatrickOReilly/scikit-learn,aflaxman/scikit-learn,q1ang/scikit-learn,russel1237/scikit-learn,Obus/scikit-learn,btabibian/scikit-learn,altairpearl/scikit-learn,fabioticconi/scikit-learn,MechCoder/scikit-learn,CVML/scikit-learn,appapantula/scikit-learn,anurag313/scikit-learn,h2educ/scikit-learn,raghavrv/scikit-learn,krez13/scikit-learn,pv/scikit-learn,3manuek/scikit-learn,ephes/scikit-learn,pianomania/scikit-learn,DSLituiev/scikit-learn,sonnyhu/scikit-learn,Garrett-R/scikit-learn,petosegan/scikit-learn,ndingwall/scikit-learn,q1ang/scikit-learn,btabibian/scikit-learn,pkruskal/scikit-learn,rrohan/scikit-learn,billy-inn/scikit-learn,NelisVerhoef/scikit-learn,shusenl/scikit-learn,3manuek/scikit-learn,ltiao/scikit-learn,trankmichael/scikit-learn,manashmndl/scikit-learn,evgchz/scikit-learn,RayMick/scikit-learn,jjx02230808/project0223,Barmaley-exe/scikit-learn,jzt5132/scikit-learn,rajat1994/scikit-learn,victorbergelin/scikit-learn,anurag313/scikit-learn,ssaeger/scikit-learn,mwv/scikit-learn,qifeigit/scikit-learn,joernhees/scikit-learn,yask123/scikit-learn,ycaihua/scikit-learn,yunfeilu/scikit-learn,siutanwong/scikit-learn,fzalkow/scikit-learn,dhruv13J/scikit-learn,glennq/scikit-learn,dsullivan7/scikit-learn,rsivapr/scikit-learn,xyguo/scikit-learn,larsmans/scikit-learn,mojoboss/scikit-learn,mhdella/scikit-learn,procoder317/scikit-learn,tawsifkhan/scikit-learn,JsNoNo/scikit-learn,ChanChiChoi/scikit-learn,dsquareindia/scikit-learn,cainiaocome/scikit-learn,cauchycui/scikit-learn,shyamalschandra/scikit-learn,Akshay0724/scikit-learn,chrisburr/scikit-learn,smartscheduling/scikit-learn-categorical-tree,Barmaley-exe/scikit-learn,kylerbrown/scikit-learn,robin-lai/scikit-learn,rahul-c1/scikit-learn,jayflo/scikit-learn,rsivapr/scikit-learn,clemkoa/scikit-learn,pnedunuri/scikit-learn,xuewei4d/scikit-learn,beepee14/scikit-learn,elkingtonmcb/scikit-learn,cwu2011/scikit-learn,Vimos/scikit-learn,M
atthieuBizien/scikit-learn,cl4rke/scikit-learn,jzt5132/scikit-learn,rexshihaoren/scikit-learn,procoder317/scikit-learn,kaichogami/scikit-learn,davidgbe/scikit-learn,jjx02230808/project0223,ivannz/scikit-learn,jpautom/scikit-learn,Achuth17/scikit-learn,akionakamura/scikit-learn,mayblue9/scikit-learn,belltailjp/scikit-learn,nhejazi/scikit-learn,frank-tancf/scikit-learn,yanlend/scikit-learn,CVML/scikit-learn,rrohan/scikit-learn,RPGOne/scikit-learn,herilalaina/scikit-learn,pratapvardhan/scikit-learn,abhishekgahlot/scikit-learn,manhhomienbienthuy/scikit-learn,aabadie/scikit-learn,bthirion/scikit-learn,mehdidc/scikit-learn,giorgiop/scikit-learn,eickenberg/scikit-learn,nelson-liu/scikit-learn,justincassidy/scikit-learn,CVML/scikit-learn,jakirkham/scikit-learn,sonnyhu/scikit-learn,samuel1208/scikit-learn,wazeerzulfikar/scikit-learn,loli/sklearn-ensembletrees,samzhang111/scikit-learn,anurag313/scikit-learn,madjelan/scikit-learn,MartinDelzant/scikit-learn,q1ang/scikit-learn,macks22/scikit-learn,imaculate/scikit-learn,JosmanPS/scikit-learn,mattgiguere/scikit-learn,CforED/Machine-Learning,CforED/Machine-Learning,Windy-Ground/scikit-learn,shahankhatch/scikit-learn,lin-credible/scikit-learn,jlegendary/scikit-learn,carrillo/scikit-learn,ltiao/scikit-learn,pianomania/scikit-learn,jorge2703/scikit-learn,Garrett-R/scikit-learn,hsuantien/scikit-learn,henrykironde/scikit-learn,466152112/scikit-learn,deepesch/scikit-learn,siutanwong/scikit-learn,schets/scikit-learn,ashhher3/scikit-learn,depet/scikit-learn,jayflo/scikit-learn,RPGOne/scikit-learn,beepee14/scikit-learn,quheng/scikit-learn,henrykironde/scikit-learn,rexshihaoren/scikit-learn,PrashntS/scikit-learn,mjudsp/Tsallis,robin-lai/scikit-learn,hugobowne/scikit-learn,liberatorqjw/scikit-learn,OshynSong/scikit-learn,justincassidy/scikit-learn,tomlof/scikit-learn,hrjn/scikit-learn,mikebenfield/scikit-learn,ClimbsRocks/scikit-learn,mblondel/scikit-learn,jereze/scikit-learn,vshtanko/scikit-learn,lbishal/scikit-learn,pkruskal/scikit-learn,OshynSong/scikit-learn,hsuantien/scikit-learn,sarahgrogan/scikit-learn,petosegan/scikit-learn,ChanChiChoi/scikit-learn,gotomypc/scikit-learn,treycausey/scikit-learn,jblackburne/scikit-learn,Jimmy-Morzaria/scikit-learn,ishanic/scikit-learn,spallavolu/scikit-learn,ky822/scikit-learn,aetilley/scikit-learn,gclenaghan/scikit-learn,themrmax/scikit-learn,B3AU/waveTree,Clyde-fare/scikit-learn,tmhm/scikit-learn,yonglehou/scikit-learn,hdmetor/scikit-learn,mugizico/scikit-learn,appapantula/scikit-learn,vibhorag/scikit-learn,vivekmishra1991/scikit-learn,anntzer/scikit-learn,NelisVerhoef/scikit-learn,voxlol/scikit-learn,plissonf/scikit-learn,466152112/scikit-learn,vshtanko/scikit-learn,cl4rke/scikit-learn,aabadie/scikit-learn,lbishal/scikit-learn,trungnt13/scikit-learn,abhishekgahlot/scikit-learn,chrisburr/scikit-learn,henrykironde/scikit-learn,NelisVerhoef/scikit-learn,mhue/scikit-learn,loli/sklearn-ensembletrees,djgagne/scikit-learn,zorroblue/scikit-learn,sarahgrogan/scikit-learn,zorojean/scikit-learn,zuku1985/scikit-learn,hsiaoyi0504/scikit-learn,yyjiang/scikit-learn,altairpearl/scikit-learn,icdishb/scikit-learn,xzh86/scikit-learn,ilyes14/scikit-learn,akionakamura/scikit-learn,tawsifkhan/scikit-learn,rohanp/scikit-learn,sinhrks/scikit-learn,roxyboy/scikit-learn,yask123/scikit-learn,mxjl620/scikit-learn,AIML/scikit-learn,ogrisel/scikit-learn,lesteve/scikit-learn,vigilv/scikit-learn,nikitasingh981/scikit-learn,shahankhatch/scikit-learn,eickenberg/scikit-learn,DSLituiev/scikit-learn,arahuja/scikit-learn,Sentient07/scikit-learn,RomainBrault/sci
kit-learn,khkaminska/scikit-learn,rishikksh20/scikit-learn,yyjiang/scikit-learn,mjgrav2001/scikit-learn,ldirer/scikit-learn,zaxtax/scikit-learn,mayblue9/scikit-learn,meduz/scikit-learn,PatrickChrist/scikit-learn,icdishb/scikit-learn,massmutual/scikit-learn,pianomania/scikit-learn,ChanderG/scikit-learn,Myasuka/scikit-learn,chrsrds/scikit-learn,madjelan/scikit-learn,sergeyf/scikit-learn,fabianp/scikit-learn,IndraVikas/scikit-learn,iismd17/scikit-learn,iismd17/scikit-learn,harshaneelhg/scikit-learn,terkkila/scikit-learn,scikit-learn/scikit-learn,vinayak-mehta/scikit-learn,mugizico/scikit-learn,rexshihaoren/scikit-learn,xuewei4d/scikit-learn,poryfly/scikit-learn,phdowling/scikit-learn,elkingtonmcb/scikit-learn,bigdataelephants/scikit-learn,kylerbrown/scikit-learn,arjoly/scikit-learn,potash/scikit-learn,MatthieuBizien/scikit-learn,ngoix/OCRF,pompiduskus/scikit-learn,bhargav/scikit-learn,lin-credible/scikit-learn,joshloyal/scikit-learn,ElDeveloper/scikit-learn,ZENGXH/scikit-learn,adamgreenhall/scikit-learn,fbagirov/scikit-learn,clemkoa/scikit-learn,PrashntS/scikit-learn,rohanp/scikit-learn,adamgreenhall/scikit-learn,RomainBrault/scikit-learn,joernhees/scikit-learn,petosegan/scikit-learn,vybstat/scikit-learn,gclenaghan/scikit-learn,espg/scikit-learn,jmetzen/scikit-learn,xiaoxiamii/scikit-learn,cwu2011/scikit-learn,cainiaocome/scikit-learn,aetilley/scikit-learn,mblondel/scikit-learn,wlamond/scikit-learn,IshankGulati/scikit-learn,ngoix/OCRF,liberatorqjw/scikit-learn,costypetrisor/scikit-learn,alvarofierroclavero/scikit-learn,MatthieuBizien/scikit-learn,sumspr/scikit-learn,mattilyra/scikit-learn,ilyes14/scikit-learn,RayMick/scikit-learn,equialgo/scikit-learn,mrshu/scikit-learn,mayblue9/scikit-learn,victorbergelin/scikit-learn,rajat1994/scikit-learn,fyffyt/scikit-learn,akionakamura/scikit-learn,kevin-intel/scikit-learn,toastedcornflakes/scikit-learn,xyguo/scikit-learn,mattilyra/scikit-learn,mugizico/scikit-learn,loli/semisupervisedforests,kashif/scikit-learn,jmschrei/scikit-learn,hugobowne/scikit-learn,nomadcube/scikit-learn,ogrisel/scikit-learn,mattilyra/scikit-learn,jblackburne/scikit-learn,jzt5132/scikit-learn,jzt5132/scikit-learn,nvoron23/scikit-learn,khkaminska/scikit-learn,sinhrks/scikit-learn,jm-begon/scikit-learn,ElDeveloper/scikit-learn,RayMick/scikit-learn,ky822/scikit-learn,michigraber/scikit-learn,hrjn/scikit-learn,TomDLT/scikit-learn,0asa/scikit-learn,ChanderG/scikit-learn,AlexandreAbraham/scikit-learn,h2educ/scikit-learn,xavierwu/scikit-learn,lbishal/scikit-learn,meduz/scikit-learn,Garrett-R/scikit-learn,xzh86/scikit-learn,r-mart/scikit-learn,robbymeals/scikit-learn,massmutual/scikit-learn,siutanwong/scikit-learn,waterponey/scikit-learn,Nyker510/scikit-learn,Akshay0724/scikit-learn,simon-pepin/scikit-learn,DonBeo/scikit-learn,sarahgrogan/scikit-learn,cybernet14/scikit-learn,zaxtax/scikit-learn,toastedcornflakes/scikit-learn,jorik041/scikit-learn,evgchz/scikit-learn,arahuja/scikit-learn,Akshay0724/scikit-learn,imaculate/scikit-learn,pnedunuri/scikit-learn,kaichogami/scikit-learn,nikitasingh981/scikit-learn,poryfly/scikit-learn,ycaihua/scikit-learn,gotomypc/scikit-learn,ycaihua/scikit-learn,kylerbrown/scikit-learn,fengzhyuan/scikit-learn,AnasGhrab/scikit-learn,pnedunuri/scikit-learn,rexshihaoren/scikit-learn,0x0all/scikit-learn,AlexRobson/scikit-learn,nvoron23/scikit-learn,TomDLT/scikit-learn,voxlol/scikit-learn,AlexanderFabisch/scikit-learn,UNR-AERIAL/scikit-learn,beepee14/scikit-learn,cwu2011/scikit-learn,mojoboss/scikit-learn,Achuth17/scikit-learn,bthirion/scikit-learn,bthirion/scikit-l
earn,jkarnows/scikit-learn,liyu1990/sklearn,anirudhjayaraman/scikit-learn,bikong2/scikit-learn,0asa/scikit-learn,xzh86/scikit-learn,vinayak-mehta/scikit-learn,dingocuster/scikit-learn,jlegendary/scikit-learn,wazeerzulfikar/scikit-learn,anntzer/scikit-learn,abhishekgahlot/scikit-learn,bigdataelephants/scikit-learn,IssamLaradji/scikit-learn,Aasmi/scikit-learn,huzq/scikit-learn,untom/scikit-learn,ssaeger/scikit-learn,jjx02230808/project0223,mattilyra/scikit-learn,Obus/scikit-learn,etkirsch/scikit-learn,rajat1994/scikit-learn,jseabold/scikit-learn,MechCoder/scikit-learn,PatrickOReilly/scikit-learn,themrmax/scikit-learn,Srisai85/scikit-learn,AlexanderFabisch/scikit-learn,larsmans/scikit-learn,hitszxp/scikit-learn,altairpearl/scikit-learn,depet/scikit-learn,mhue/scikit-learn,lucidfrontier45/scikit-learn,untom/scikit-learn,Windy-Ground/scikit-learn,ishanic/scikit-learn,kevin-intel/scikit-learn,MartinSavc/scikit-learn,xavierwu/scikit-learn,glouppe/scikit-learn,larsmans/scikit-learn,dsullivan7/scikit-learn,PatrickOReilly/scikit-learn,trungnt13/scikit-learn,alexeyum/scikit-learn,zorojean/scikit-learn,marcocaccin/scikit-learn,yonglehou/scikit-learn,vortex-ape/scikit-learn,shangwuhencc/scikit-learn,rsivapr/scikit-learn,equialgo/scikit-learn,shenzebang/scikit-learn,shusenl/scikit-learn,waterponey/scikit-learn,fabioticconi/scikit-learn,mehdidc/scikit-learn,hitszxp/scikit-learn,nesterione/scikit-learn,aminert/scikit-learn,jm-begon/scikit-learn,jakirkham/scikit-learn,Nyker510/scikit-learn,poryfly/scikit-learn,ZENGXH/scikit-learn,PatrickChrist/scikit-learn,shusenl/scikit-learn,joernhees/scikit-learn,rahul-c1/scikit-learn,eickenberg/scikit-learn,liyu1990/sklearn,ZENGXH/scikit-learn,0x0all/scikit-learn,mfjb/scikit-learn,ChanChiChoi/scikit-learn,RachitKansal/scikit-learn,ilyes14/scikit-learn,anirudhjayaraman/scikit-learn,pythonvietnam/scikit-learn,giorgiop/scikit-learn,sumspr/scikit-learn,scikit-learn/scikit-learn,andaag/scikit-learn,nesterione/scikit-learn,xavierwu/scikit-learn,LiaoPan/scikit-learn,plissonf/scikit-learn,chrsrds/scikit-learn,BiaDarkia/scikit-learn,yunfeilu/scikit-learn,jakobworldpeace/scikit-learn,jmetzen/scikit-learn,waterponey/scikit-learn,dingocuster/scikit-learn,nhejazi/scikit-learn,Jimmy-Morzaria/scikit-learn,yyjiang/scikit-learn,CVML/scikit-learn,jseabold/scikit-learn,belltailjp/scikit-learn,imaculate/scikit-learn,ankurankan/scikit-learn,carrillo/scikit-learn,shangwuhencc/scikit-learn,zorroblue/scikit-learn,TomDLT/scikit-learn,fengzhyuan/scikit-learn,cybernet14/scikit-learn,abhishekkrthakur/scikit-learn,yonglehou/scikit-learn,mattilyra/scikit-learn,AIML/scikit-learn,mayblue9/scikit-learn,ephes/scikit-learn,arabenjamin/scikit-learn,dsquareindia/scikit-learn,depet/scikit-learn,manhhomienbienthuy/scikit-learn,kevin-intel/scikit-learn,manashmndl/scikit-learn,nrhine1/scikit-learn,Clyde-fare/scikit-learn,yonglehou/scikit-learn,ashhher3/scikit-learn,mjudsp/Tsallis,deepesch/scikit-learn,kagayakidan/scikit-learn,anirudhjayaraman/scikit-learn,ivannz/scikit-learn,RPGOne/scikit-learn,arabenjamin/scikit-learn,nvoron23/scikit-learn,Clyde-fare/scikit-learn,clemkoa/scikit-learn,pompiduskus/scikit-learn,tomlof/scikit-learn,Vimos/scikit-learn,sarahgrogan/scikit-learn,fzalkow/scikit-learn,MohammedWasim/scikit-learn,eg-zhang/scikit-learn,etkirsch/scikit-learn,mikebenfield/scikit-learn,treycausey/scikit-learn,thilbern/scikit-learn,3manuek/scikit-learn,mxjl620/scikit-learn,mfjb/scikit-learn,qifeigit/scikit-learn,voxlol/scikit-learn,AlexandreAbraham/scikit-learn,kagayakidan/scikit-learn,glemaitre/scikit-learn,
maheshakya/scikit-learn,alexsavio/scikit-learn,Clyde-fare/scikit-learn,pypot/scikit-learn,ycaihua/scikit-learn,mojoboss/scikit-learn,huobaowangxi/scikit-learn,ZenDevelopmentSystems/scikit-learn,wzbozon/scikit-learn,B3AU/waveTree,siutanwong/scikit-learn,nesterione/scikit-learn,betatim/scikit-learn,plissonf/scikit-learn,rahul-c1/scikit-learn,zhenv5/scikit-learn,xiaoxiamii/scikit-learn,evgchz/scikit-learn,nelson-liu/scikit-learn,robin-lai/scikit-learn,aflaxman/scikit-learn,iismd17/scikit-learn,glemaitre/scikit-learn,fredhusser/scikit-learn,zuku1985/scikit-learn,idlead/scikit-learn,bthirion/scikit-learn,loli/semisupervisedforests,vortex-ape/scikit-learn,pratapvardhan/scikit-learn,ningchi/scikit-learn,sonnyhu/scikit-learn,jorge2703/scikit-learn,cybernet14/scikit-learn,nikitasingh981/scikit-learn,Windy-Ground/scikit-learn,shikhardb/scikit-learn,appapantula/scikit-learn,abhishekkrthakur/scikit-learn,fabianp/scikit-learn,B3AU/waveTree,DonBeo/scikit-learn,trankmichael/scikit-learn,zorroblue/scikit-learn,JeanKossaifi/scikit-learn,kashif/scikit-learn,spallavolu/scikit-learn,mhdella/scikit-learn,heli522/scikit-learn,ldirer/scikit-learn,AlexRobson/scikit-learn,ashhher3/scikit-learn,michigraber/scikit-learn,ssaeger/scikit-learn,arjoly/scikit-learn,nikitasingh981/scikit-learn,zhenv5/scikit-learn,thientu/scikit-learn,espg/scikit-learn,Djabbz/scikit-learn,AlexandreAbraham/scikit-learn,jaidevd/scikit-learn,mblondel/scikit-learn,lazywei/scikit-learn,ssaeger/scikit-learn,BiaDarkia/scikit-learn,henrykironde/scikit-learn,glemaitre/scikit-learn,djgagne/scikit-learn,qifeigit/scikit-learn,elkingtonmcb/scikit-learn,ZENGXH/scikit-learn,vivekmishra1991/scikit-learn,Aasmi/scikit-learn,fabianp/scikit-learn,lesteve/scikit-learn,LohithBlaze/scikit-learn,belltailjp/scikit-learn,phdowling/scikit-learn,pythonvietnam/scikit-learn,Achuth17/scikit-learn,Jimmy-Morzaria/scikit-learn,HolgerPeters/scikit-learn,sanketloke/scikit-learn,Srisai85/scikit-learn,MechCoder/scikit-learn,jm-begon/scikit-learn,maheshakya/scikit-learn,simon-pepin/scikit-learn,kevin-intel/scikit-learn,manashmndl/scikit-learn,dingocuster/scikit-learn,fengzhyuan/scikit-learn,giorgiop/scikit-learn,vibhorag/scikit-learn,ningchi/scikit-learn,loli/semisupervisedforests,HolgerPeters/scikit-learn,kmike/scikit-learn,jblackburne/scikit-learn,walterreade/scikit-learn,ivannz/scikit-learn,mattgiguere/scikit-learn,lenovor/scikit-learn,Titan-C/scikit-learn,untom/scikit-learn,betatim/scikit-learn,bikong2/scikit-learn,glennq/scikit-learn,hrjn/scikit-learn,voxlol/scikit-learn,shikhardb/scikit-learn,fabioticconi/scikit-learn,schets/scikit-learn,shikhardb/scikit-learn,florian-f/sklearn,amueller/scikit-learn,AlexanderFabisch/scikit-learn,quheng/scikit-learn,Vimos/scikit-learn,Akshay0724/scikit-learn,beepee14/scikit-learn,h2educ/scikit-learn,fredhusser/scikit-learn,h2educ/scikit-learn,robin-lai/scikit-learn,MohammedWasim/scikit-learn,jm-begon/scikit-learn,fzalkow/scikit-learn,henridwyer/scikit-learn,treycausey/scikit-learn,ltiao/scikit-learn,hsiaoyi0504/scikit-learn,rishikksh20/scikit-learn,costypetrisor/scikit-learn,waterponey/scikit-learn,procoder317/scikit-learn,ilo10/scikit-learn,luo66/scikit-learn,lazywei/scikit-learn,RachitKansal/scikit-learn,Lawrence-Liu/scikit-learn,pianomania/scikit-learn,abimannans/scikit-learn,huobaowangxi/scikit-learn,xzh86/scikit-learn,ChanChiChoi/scikit-learn,yanlend/scikit-learn,wazeerzulfikar/scikit-learn,jakobworldpeace/scikit-learn,AnasGhrab/scikit-learn,abhishekgahlot/scikit-learn,tdhopper/scikit-learn,lucidfrontier45/scikit-learn,tomlof/scikit-le
arn,anntzer/scikit-learn,sergeyf/scikit-learn,shahankhatch/scikit-learn,0asa/scikit-learn,andaag/scikit-learn,andaag/scikit-learn,krez13/scikit-learn,espg/scikit-learn,JeanKossaifi/scikit-learn,Nyker510/scikit-learn,Fireblend/scikit-learn,qifeigit/scikit-learn,Obus/scikit-learn,bnaul/scikit-learn,jlegendary/scikit-learn,marcocaccin/scikit-learn,MartinSavc/scikit-learn,DSLituiev/scikit-learn,madjelan/scikit-learn,krez13/scikit-learn,zihua/scikit-learn,russel1237/scikit-learn,alexeyum/scikit-learn,ZenDevelopmentSystems/scikit-learn,Garrett-R/scikit-learn,jmetzen/scikit-learn,cl4rke/scikit-learn,bnaul/scikit-learn,pratapvardhan/scikit-learn,nomadcube/scikit-learn,etkirsch/scikit-learn,frank-tancf/scikit-learn,anntzer/scikit-learn,nmayorov/scikit-learn,MohammedWasim/scikit-learn,evgchz/scikit-learn,samuel1208/scikit-learn,massmutual/scikit-learn,glouppe/scikit-learn,akionakamura/scikit-learn,larsmans/scikit-learn,LohithBlaze/scikit-learn,sanketloke/scikit-learn,0x0all/scikit-learn,glouppe/scikit-learn,mikebenfield/scikit-learn,aetilley/scikit-learn,florian-f/sklearn,IshankGulati/scikit-learn,cwu2011/scikit-learn,raghavrv/scikit-learn,MohammedWasim/scikit-learn,wlamond/scikit-learn,mjudsp/Tsallis,aflaxman/scikit-learn,AnasGhrab/scikit-learn,imaculate/scikit-learn,macks22/scikit-learn,rajat1994/scikit-learn,harshaneelhg/scikit-learn,simon-pepin/scikit-learn,cybernet14/scikit-learn,mehdidc/scikit-learn,lin-credible/scikit-learn,vermouthmjl/scikit-learn,hainm/scikit-learn,vinayak-mehta/scikit-learn,xubenben/scikit-learn,luo66/scikit-learn,trungnt13/scikit-learn,henridwyer/scikit-learn,andrewnc/scikit-learn,fbagirov/scikit-learn,lin-credible/scikit-learn,BiaDarkia/scikit-learn,UNR-AERIAL/scikit-learn,jorik041/scikit-learn,Titan-C/scikit-learn,AIML/scikit-learn,B3AU/waveTree,poryfly/scikit-learn,hugobowne/scikit-learn,alvarofierroclavero/scikit-learn,equialgo/scikit-learn,maheshakya/scikit-learn,mwv/scikit-learn,ilo10/scikit-learn,vermouthmjl/scikit-learn,vortex-ape/scikit-learn,dsquareindia/scikit-learn,victorbergelin/scikit-learn,smartscheduling/scikit-learn-categorical-tree,zihua/scikit-learn,mxjl620/scikit-learn,AnasGhrab/scikit-learn,btabibian/scikit-learn,moutai/scikit-learn,davidgbe/scikit-learn,bigdataelephants/scikit-learn,bikong2/scikit-learn,aewhatley/scikit-learn,JosmanPS/scikit-learn,eickenberg/scikit-learn,yask123/scikit-learn,mlyundin/scikit-learn,ndingwall/scikit-learn,Obus/scikit-learn,massmutual/scikit-learn,nrhine1/scikit-learn,Sentient07/scikit-learn,heli522/scikit-learn,kmike/scikit-learn,0x0all/scikit-learn,mattgiguere/scikit-learn,nmayorov/scikit-learn,khkaminska/scikit-learn,JsNoNo/scikit-learn,RPGOne/scikit-learn,MatthieuBizien/scikit-learn,arahuja/scikit-learn,MartinSavc/scikit-learn,pypot/scikit-learn,smartscheduling/scikit-learn-categorical-tree,TomDLT/scikit-learn,henridwyer/scikit-learn,liyu1990/sklearn,CforED/Machine-Learning,tmhm/scikit-learn,yunfeilu/scikit-learn,joshloyal/scikit-learn,vybstat/scikit-learn,betatim/scikit-learn,hitszxp/scikit-learn,zuku1985/scikit-learn,hsiaoyi0504/scikit-learn,gclenaghan/scikit-learn,billy-inn/scikit-learn,lesteve/scikit-learn,vibhorag/scikit-learn,mfjb/scikit-learn,bhargav/scikit-learn,pkruskal/scikit-learn,kjung/scikit-learn,betatim/scikit-learn,thilbern/scikit-learn,JPFrancoia/scikit-learn,ClimbsRocks/scikit-learn,PrashntS/scikit-learn,loli/sklearn-ensembletrees,ahoyosid/scikit-learn,luo66/scikit-learn,0asa/scikit-learn,equialgo/scikit-learn,potash/scikit-learn,pv/scikit-learn,belltailjp/scikit-learn,xwolf12/scikit-learn,bhargav/sc
ikit-learn,pompiduskus/scikit-learn,icdishb/scikit-learn,quheng/scikit-learn,zuku1985/scikit-learn,jblackburne/scikit-learn,maheshakya/scikit-learn,heli522/scikit-learn,Nyker510/scikit-learn,0asa/scikit-learn,hlin117/scikit-learn,jereze/scikit-learn,spallavolu/scikit-learn,henridwyer/scikit-learn,RachitKansal/scikit-learn,f3r/scikit-learn,jereze/scikit-learn,jpautom/scikit-learn,chrisburr/scikit-learn,robbymeals/scikit-learn,IndraVikas/scikit-learn,ankurankan/scikit-learn,dsullivan7/scikit-learn,MartinDelzant/scikit-learn,theoryno3/scikit-learn,bikong2/scikit-learn,anirudhjayaraman/scikit-learn,tawsifkhan/scikit-learn,loli/sklearn-ensembletrees,jakobworldpeace/scikit-learn,lenovor/scikit-learn,NunoEdgarGub1/scikit-learn,lucidfrontier45/scikit-learn,NunoEdgarGub1/scikit-learn,vivekmishra1991/scikit-learn,nesterione/scikit-learn,jkarnows/scikit-learn,murali-munna/scikit-learn,wlamond/scikit-learn,florian-f/sklearn,liangz0707/scikit-learn,alvarofierroclavero/scikit-learn,krez13/scikit-learn,davidgbe/scikit-learn,schets/scikit-learn,fredhusser/scikit-learn,herilalaina/scikit-learn,Myasuka/scikit-learn,walterreade/scikit-learn,tmhm/scikit-learn,vshtanko/scikit-learn,arahuja/scikit-learn,r-mart/scikit-learn,mjudsp/Tsallis,yanlend/scikit-learn,IssamLaradji/scikit-learn,hainm/scikit-learn,meduz/scikit-learn,kjung/scikit-learn,mlyundin/scikit-learn,rvraghav93/scikit-learn,evgchz/scikit-learn,altairpearl/scikit-learn,fabioticconi/scikit-learn,fengzhyuan/scikit-learn,ky822/scikit-learn,glouppe/scikit-learn,wzbozon/scikit-learn,vigilv/scikit-learn,maheshakya/scikit-learn,deepesch/scikit-learn,costypetrisor/scikit-learn,moutai/scikit-learn,adamgreenhall/scikit-learn,zhenv5/scikit-learn,samzhang111/scikit-learn,idlead/scikit-learn,harshaneelhg/scikit-learn,stylianos-kampakis/scikit-learn,OshynSong/scikit-learn,ndingwall/scikit-learn,ClimbsRocks/scikit-learn,Titan-C/scikit-learn,RomainBrault/scikit-learn,Djabbz/scikit-learn,florian-f/sklearn,stylianos-kampakis/scikit-learn,PatrickChrist/scikit-learn,RachitKansal/scikit-learn,bhargav/scikit-learn,roxyboy/scikit-learn,davidgbe/scikit-learn,thilbern/scikit-learn,murali-munna/scikit-learn,olologin/scikit-learn,NunoEdgarGub1/scikit-learn,jjx02230808/project0223,trankmichael/scikit-learn,samzhang111/scikit-learn,zihua/scikit-learn,mrshu/scikit-learn,aminert/scikit-learn,sonnyhu/scikit-learn,liangz0707/scikit-learn,tmhm/scikit-learn,arjoly/scikit-learn,Adai0808/scikit-learn,nomadcube/scikit-learn,xyguo/scikit-learn,kmike/scikit-learn,eg-zhang/scikit-learn,JsNoNo/scikit-learn,hrjn/scikit-learn,Fireblend/scikit-learn,MartinDelzant/scikit-learn,pypot/scikit-learn,r-mart/scikit-learn,aflaxman/scikit-learn,shusenl/scikit-learn,AlexanderFabisch/scikit-learn,RayMick/scikit-learn,shahankhatch/scikit-learn,cainiaocome/scikit-learn,OshynSong/scikit-learn,jaidevd/scikit-learn,russel1237/scikit-learn,carrillo/scikit-learn,JeanKossaifi/scikit-learn,sergeyf/scikit-learn,kylerbrown/scikit-learn,mjgrav2001/scikit-learn,terkkila/scikit-learn,ahoyosid/scikit-learn,cauchycui/scikit-learn,olologin/scikit-learn,JeanKossaifi/scikit-learn,clemkoa/scikit-learn,IndraVikas/scikit-learn,DSLituiev/scikit-learn,shenzebang/scikit-learn,mrshu/scikit-learn,ElDeveloper/scikit-learn,eickenberg/scikit-learn,btabibian/scikit-learn,theoryno3/scikit-learn,JsNoNo/scikit-learn,Sentient07/scikit-learn,wanggang3333/scikit-learn,rahuldhote/scikit-learn,mhdella/scikit-learn,saiwing-yeung/scikit-learn,jakirkham/scikit-learn,etkirsch/scikit-learn,wanggang3333/scikit-learn,sanketloke/scikit-learn,huzq/scikit
-learn,shenzebang/scikit-learn,mojoboss/scikit-learn,nmayorov/scikit-learn,schets/scikit-learn,potash/scikit-learn,glennq/scikit-learn,ahoyosid/scikit-learn,alvarofierroclavero/scikit-learn,mblondel/scikit-learn,YinongLong/scikit-learn,lbishal/scikit-learn,toastedcornflakes/scikit-learn,yunfeilu/scikit-learn,depet/scikit-learn,pompiduskus/scikit-learn,IshankGulati/scikit-learn,rrohan/scikit-learn,trungnt13/scikit-learn,ngoix/OCRF,samzhang111/scikit-learn,mrshu/scikit-learn,tdhopper/scikit-learn,ngoix/OCRF,wazeerzulfikar/scikit-learn,Vimos/scikit-learn,hdmetor/scikit-learn,Aasmi/scikit-learn,scikit-learn/scikit-learn,michigraber/scikit-learn,andaag/scikit-learn,cl4rke/scikit-learn,xyguo/scikit-learn,ephes/scikit-learn,ningchi/scikit-learn,mhue/scikit-learn,nvoron23/scikit-learn,rvraghav93/scikit-learn,frank-tancf/scikit-learn,ltiao/scikit-learn,pv/scikit-learn,mjgrav2001/scikit-learn,untom/scikit-learn,ankurankan/scikit-learn,jakobworldpeace/scikit-learn,aminert/scikit-learn,shyamalschandra/scikit-learn,ZenDevelopmentSystems/scikit-learn,hainm/scikit-learn,hdmetor/scikit-learn,JPFrancoia/scikit-learn,ivannz/scikit-learn,LohithBlaze/scikit-learn,kmike/scikit-learn,466152112/scikit-learn,shangwuhencc/scikit-learn,ycaihua/scikit-learn,ChanderG/scikit-learn,amueller/scikit-learn,tawsifkhan/scikit-learn,AIML/scikit-learn,depet/scikit-learn,tosolveit/scikit-learn,manhhomienbienthuy/scikit-learn,zihua/scikit-learn,tosolveit/scikit-learn,Fireblend/scikit-learn,vigilv/scikit-learn,Lawrence-Liu/scikit-learn,LiaoPan/scikit-learn,jorik041/scikit-learn,ishanic/scikit-learn,LiaoPan/scikit-learn,roxyboy/scikit-learn,hsuantien/scikit-learn,bnaul/scikit-learn,jorik041/scikit-learn,iismd17/scikit-learn,kashif/scikit-learn,huobaowangxi/scikit-learn,aabadie/scikit-learn,lazywei/scikit-learn,phdowling/scikit-learn,nhejazi/scikit-learn,abhishekgahlot/scikit-learn,jmetzen/scikit-learn,thientu/scikit-learn,BiaDarkia/scikit-learn,fyffyt/scikit-learn,zorojean/scikit-learn,Windy-Ground/scikit-learn,ashhher3/scikit-learn,tdhopper/scikit-learn,mrshu/scikit-learn,moutai/scikit-learn,kagayakidan/scikit-learn,roxyboy/scikit-learn,rsivapr/scikit-learn,mhue/scikit-learn,joernhees/scikit-learn,sumspr/scikit-learn,xiaoxiamii/scikit-learn,jakirkham/scikit-learn,jorge2703/scikit-learn,kaichogami/scikit-learn,NelisVerhoef/scikit-learn,rahuldhote/scikit-learn,ElDeveloper/scikit-learn,treycausey/scikit-learn,pythonvietnam/scikit-learn,petosegan/scikit-learn,rvraghav93/scikit-learn,fbagirov/scikit-learn,aewhatley/scikit-learn,icdishb/scikit-learn,florian-f/sklearn,Titan-C/scikit-learn,cainiaocome/scikit-learn,HolgerPeters/scikit-learn,dhruv13J/scikit-learn,andrewnc/scikit-learn,zorroblue/scikit-learn,kjung/scikit-learn,cauchycui/scikit-learn,manhhomienbienthuy/scikit-learn,rahuldhote/scikit-learn,mehdidc/scikit-learn,amueller/scikit-learn,jkarnows/scikit-learn,nelson-liu/scikit-learn,idlead/scikit-learn,wzbozon/scikit-learn,ahoyosid/scikit-learn,idlead/scikit-learn,sinhrks/scikit-learn,theoryno3/scikit-learn,mhdella/scikit-learn,abhishekkrthakur/scikit-learn,yanlend/scikit-learn,justincassidy/scikit-learn,aminert/scikit-learn,herilalaina/scikit-learn,jayflo/scikit-learn,f3r/scikit-learn,UNR-AERIAL/scikit-learn,joshloyal/scikit-learn,YinongLong/scikit-learn,huobaowangxi/scikit-learn,MartinDelzant/scikit-learn,dsquareindia/scikit-learn,Myasuka/scikit-learn,HolgerPeters/scikit-learn,anurag313/scikit-learn,giorgiop/scikit-learn,jaidevd/scikit-learn,billy-inn/scikit-learn,russel1237/scikit-learn,kashif/scikit-learn,MechCoder/scikit-learn
,deepesch/scikit-learn,robbymeals/scikit-learn,dhruv13J/scikit-learn,hitszxp/scikit-learn,dhruv13J/scikit-learn,Adai0808/scikit-learn,lesteve/scikit-learn,rahul-c1/scikit-learn,ankurankan/scikit-learn,andrewnc/scikit-learn,UNR-AERIAL/scikit-learn,larsmans/scikit-learn,tdhopper/scikit-learn,saiwing-yeung/scikit-learn,jseabold/scikit-learn,mikebenfield/scikit-learn,shenzebang/scikit-learn,hitszxp/scikit-learn,tosolveit/scikit-learn,mugizico/scikit-learn,lazywei/scikit-learn,manashmndl/scikit-learn,lenovor/scikit-learn,Fireblend/scikit-learn,pypot/scikit-learn,fyffyt/scikit-learn,pnedunuri/scikit-learn,jpautom/scikit-learn,mxjl620/scikit-learn,djgagne/scikit-learn,nelson-liu/scikit-learn,elkingtonmcb/scikit-learn,glennq/scikit-learn,vigilv/scikit-learn,kjung/scikit-learn,r-mart/scikit-learn,nrhine1/scikit-learn,arabenjamin/scikit-learn,gotomypc/scikit-learn,sinhrks/scikit-learn,saiwing-yeung/scikit-learn,rahuldhote/scikit-learn,PatrickOReilly/scikit-learn,raghavrv/scikit-learn,f3r/scikit-learn,thientu/scikit-learn,joshloyal/scikit-learn,Lawrence-Liu/scikit-learn,LohithBlaze/scikit-learn,marcocaccin/scikit-learn,liangz0707/scikit-learn,cauchycui/scikit-learn,ZenDevelopmentSystems/scikit-learn,bigdataelephants/scikit-learn,devanshdalal/scikit-learn,JosmanPS/scikit-learn,liyu1990/sklearn,trankmichael/scikit-learn,jseabold/scikit-learn,JosmanPS/scikit-learn,andrewnc/scikit-learn,pratapvardhan/scikit-learn,ogrisel/scikit-learn,zaxtax/scikit-learn,devanshdalal/scikit-learn,vermouthmjl/scikit-learn,aewhatley/scikit-learn,jpautom/scikit-learn,zorojean/scikit-learn,ldirer/scikit-learn,tomlof/scikit-learn
|
import numpy as np
import scipy.sparse

from abc import ABCMeta, abstractmethod

from ..base import BaseLibSVM


class SparseBaseLibSVM(BaseLibSVM):

    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, impl, kernel, degree, gamma, coef0,
                 tol, C, nu, epsilon, shrinking, probability, cache_size,
                 class_weight, verbose):
        assert kernel in self._sparse_kernels, \
            "kernel should be one of %s, "\
            "%s was given." % (self._kernel_types, kernel)
        super(SparseBaseLibSVM, self).__init__(impl, kernel, degree, gamma,
            coef0, tol, C, nu, epsilon, shrinking, probability, cache_size,
            True, class_weight, verbose)

    def fit(self, X, y, sample_weight=None):
        X = scipy.sparse.csr_matrix(X, dtype=np.float64)
        return super(SparseBaseLibSVM, self).fit(X, y, sample_weight)
FIX sparse OneClassSVM was using the wrong parameter
|
import numpy as np
import scipy.sparse

from abc import ABCMeta, abstractmethod

from ..base import BaseLibSVM


class SparseBaseLibSVM(BaseLibSVM):

    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, impl, kernel, degree, gamma, coef0,
                 tol, C, nu, epsilon, shrinking, probability, cache_size,
                 class_weight, verbose):
        assert kernel in self._sparse_kernels, \
            "kernel should be one of %s, "\
            "%s was given." % (self._kernel_types, kernel)
        super(SparseBaseLibSVM, self).__init__(impl, kernel, degree, gamma,
            coef0, tol, C, nu, epsilon, shrinking, probability, cache_size,
            True, class_weight, verbose)

    def fit(self, X, y, sample_weight=None):
        X = scipy.sparse.csr_matrix(X, dtype=np.float64)
        return super(SparseBaseLibSVM, self).fit(X, y,
                                                 sample_weight=sample_weight)
|
37a8da7f759e2218c63b9c56e69cca0987855cc9
|
comics/crawler/crawlers/johnnywander.py
|
comics/crawler/crawlers/johnnywander.py
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta


class ComicMeta(BaseComicMeta):
    name = 'Johnny Wander'
    language = 'en'
    url = 'http://www.johnnywander.com/'
    start_date = '2008-09-30'
    history_capable_days = 40
    schedule = 'Tu,Th'
    time_zone = -8
    rights = 'Yuko Ota & Ananth Panagariya'


class ComicCrawler(BaseComicCrawler):
    def _get_url(self):
        self.parse_feed('http://www.johnnywander.com/feed')
        for entry in self.feed.entries:
            if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
                self.title = entry.title
                pieces = entry.summary.split('"')
                for i, piece in enumerate(pieces):
                    if piece.count('src='):
                        self.url = pieces[i + 1]
                    if piece.count('title='):
                        self.text = pieces[i + 1]
                if self.url and self.text:
                    return
|
Add crawler for Johnny Wander
|
Add crawler for Johnny Wander
|
Python
|
agpl-3.0
|
jodal/comics,jodal/comics,klette/comics,jodal/comics,jodal/comics,klette/comics,datagutten/comics,klette/comics,datagutten/comics,datagutten/comics,datagutten/comics
|
cd97d293343f59b28c59ec88c1f80ebb4697a3f2
|
protein_preview_generation/protein_preview_generation.py
|
protein_preview_generation/protein_preview_generation.py
|
##This script takes a path with PDB files, an output path and a csv file as inputs, and creates a high quality static image
#of each uniprot in the csv file in the right orientation, coloured by secondary structure
#The CSV file contains a column with uniprot ids, a column with the pdb file that contains the best chain for that uniprot, and the chain ID of that chain within the file
#(this csv file was generated with the script "generate_uniprot_beststructure_list.py")
##Environment variables for the three paths (reference file path, pdb files path and output path) must be set before running this script.

##MODULES IMPORT
import csv
import os

##FUNCTION DECLARATION
def create_picture_chain(uniprot_id,filepath,chain,out_path, count):
    ###This function takes a PDB filename and a chain identifier and generates previews of that chain in the best possible orientation
    ###The previews are stored in the out_path provided as input
    cmd.load(filepath)
    cmd.hide("everything")
    cmd.show("cartoon", "chain " + chain)
    cmd.orient("chain " + chain)
    cmd.color("red", "ss h")
    cmd.color("yellow", "ss s")
    cmd.color("green", "ss l")
    cmd.ray(1920,1080)
    cmd.png(out_path + uniprot_id, dpi="300")
    count = count + 1
    cmd.delete("all")
    count = count + 1
    return count
####End of function

##SCRIPT SETTINGS
reference_path = os.environ['PERSONAL_DATA'] + "/uniprot2bestfile.csv"
files_path = os.environ['DATA'] + "/pdb_interactome3d_mod/"
out_path = "/data/protein_previews/"
count = 0

##GENERAL PYMOL SETTINGS
cmd.bg_color("white")
cmd.set("cartoon_fancy_helices", 1)
cmd.set("cartoon_fancy_sheets", 1)
cmd.set("cartoon_highlight_color", "lightorange")
cmd.set("cartoon_discrete_colors", 1)
cmd.set("ray_trace_mode", 0)

#PARSING OF THE CSV INPUT FILE
ref_file = open(reference_path, "r")
reader = csv.reader(ref_file)
for row in reader:
    uniprot_id = row[0]
    filepath = files_path + row[1]
    chain = row[2]
    count = create_picture_chain(uniprot_id,filepath,chain,out_path,count)
|
Add protein preview generation script
|
Add protein preview generation script
|
Python
|
mit
|
jtapial/snvdb,jtapial/snvdb
|
213708504447c8858ed3ba86324acaff98dbefdf
|
seahub/profile/models.py
|
seahub/profile/models.py
|
from django.db import models


class Profile(models.Model):
    user = models.EmailField(unique=True)
    nickname = models.CharField(max_length=64, blank=True)
    intro = models.TextField(max_length=256, blank=True)
|
from django.db import models
from django.core.cache import cache
from django.dispatch import receiver

from settings import EMAIL_ID_CACHE_PREFIX, EMAIL_ID_CACHE_TIMEOUT
from registration.signals import user_registered


class Profile(models.Model):
    user = models.EmailField(unique=True)
    nickname = models.CharField(max_length=64, blank=True)
    intro = models.TextField(max_length=256, blank=True)


@receiver(user_registered)
def clean_email_id_cache(sender, **kwargs):
    user = kwargs['user']
    cache.set(EMAIL_ID_CACHE_PREFIX+user.email, user.id, EMAIL_ID_CACHE_TIMEOUT)
|
Update email_id cache when user has registered
|
Update email_id cache when user has registered
|
Python
|
apache-2.0
|
madflow/seahub,cloudcopy/seahub,Chilledheart/seahub,cloudcopy/seahub,miurahr/seahub,Chilledheart/seahub,cloudcopy/seahub,miurahr/seahub,madflow/seahub,madflow/seahub,cloudcopy/seahub,miurahr/seahub,Chilledheart/seahub,madflow/seahub,madflow/seahub,Chilledheart/seahub,Chilledheart/seahub,miurahr/seahub
|
25215f3b353122ccb4a20b00a9fdcdaca81f3ef8
|
other/vlen_leak.py
|
other/vlen_leak.py
|
"""
Demonstrates memory leak involving variable-length strings.
"""
import sys
import resource
import numpy as np
import h5py
FNAME = 'test.hdf5'
if 'linux' in sys.platform:
MAXRSS_BYTES = 1024. # in KiB on linux
else:
MAXRSS_BYTES = 1.
if sys.version_info[0] == 3:
xrange = range
memory = 0
def print_memory():
global memory
rubytes = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss*MAXRSS_BYTES
print ("%.2f MB (%.2f since last call)" % (rubytes/(1024.**2), (rubytes-memory)/(1024.**2)))
memory = rubytes
dt = h5py.special_dtype(vlen=bytes)
data = np.array(["%d"*100 for idx in xrange(1000)])
def ds_leak():
print("Testing vlens for dataset r/w")
print("-----------------------------")
with h5py.File(FNAME,'w') as f:
ds = f.create_dataset('dset', (1000,), dtype=dt)
for idx in xrange(500):
#print idx
if idx%100 == 0:
print_memory()
ds[...] = data
ds[...]
def attr_leak():
print("Testing vlens for attribute r/w")
print("-------------------------------")
with h5py.File(FNAME,'w') as f:
for idx in xrange(500):
if idx%100 == 0:
print_memory()
f.attrs.create('foo', dtype=dt, data=data)
f.attrs['foo']
if __name__ == '__main__':
print("h5py ", h5py.version.version)
print("HDF5 ", h5py.version.hdf5_version)
attr_leak()
ds_leak()
|
Add script demonstrating vlen memory leak
|
Add script demonstrating vlen memory leak
|
Python
|
bsd-3-clause
|
h5py/h5py,h5py/h5py,h5py/h5py
|
76d5056c24daf3a61201f6b171a21fc879c0e602
|
tests/test_migrations.py
|
tests/test_migrations.py
|
import copy

import pytest
import mock

import scrapi
from scrapi.linter.document import NormalizedDocument, RawDocument
from scrapi import tasks
from scrapi.migrations import rename
from scrapi import registry
# Need to force cassandra to ignore set keyspace
from scrapi.processing.cassandra import CassandraProcessor, DocumentModel

from . import utils

test_cass = CassandraProcessor()
# test_elastic = ElasticsearchProcessor()

harvester = utils.TestHarvester()

NORMALIZED = NormalizedDocument(utils.RECORD)
RAW = harvester.harvest()[0]


@pytest.fixture
def harvester():
    pass  # Need to override this


@pytest.mark.cassandra
def test_rename():
    real_es = scrapi.processing.elasticsearch.es
    scrapi.processing.elasticsearch.es = mock.MagicMock()
    test_cass.process_raw(RAW)
    test_cass.process_normalized(RAW, NORMALIZED)
    # test_elastic.process_raw(RAW)
    # test_elastic.process_normalized(RAW, NORMALIZED, index='test')
    queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
    old_source = NORMALIZED['shareProperties']['source']
    assert(queryset[0].source == utils.RECORD['shareProperties']['source'])
    assert(queryset[0].source == old_source)
    new_record = copy.deepcopy(utils.RECORD)
    new_record['shareProperties']['source'] = 'wwe_news'
    # test_info = copy.deepcopy(registry['test'])
    test_info = registry['test'].__class__()
    test_info.short_name = 'wwe_news'
    registry['wwe_news'] = test_info
    tasks.migrate(rename, source=old_source, target='wwe_news')
    queryset = DocumentModel.objects(docID=RAW['docID'], source='wwe_news')
    assert(queryset[0].source == 'wwe_news')
    assert (len(queryset) == 1)
    scrapi.processing.elasticsearch.es = real_es
|
Add migration test for rename
|
Add migration test for rename
|
Python
|
apache-2.0
|
fabianvf/scrapi,fabianvf/scrapi,icereval/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi,alexgarciac/scrapi,erinspace/scrapi,felliott/scrapi,jeffreyliu3230/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi
|
Add migration test for rename
|
import copy
import pytest
import mock
import scrapi
from scrapi.linter.document import NormalizedDocument, RawDocument
from scrapi import tasks
from scrapi.migrations import rename
from scrapi import registry
# Need to force cassandra to ignore set keyspace
from scrapi.processing.cassandra import CassandraProcessor, DocumentModel
from . import utils
test_cass = CassandraProcessor()
# test_elastic = ElasticsearchProcessor()
harvester = utils.TestHarvester()
NORMALIZED = NormalizedDocument(utils.RECORD)
RAW = harvester.harvest()[0]
@pytest.fixture
def harvester():
pass # Need to override this
@pytest.mark.cassandra
def test_rename():
real_es = scrapi.processing.elasticsearch.es
scrapi.processing.elasticsearch.es = mock.MagicMock()
test_cass.process_raw(RAW)
test_cass.process_normalized(RAW, NORMALIZED)
# test_elastic.process_raw(RAW)
# test_elastic.process_normalized(RAW, NORMALIZED, index='test')
queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
old_source = NORMALIZED['shareProperties']['source']
assert(queryset[0].source == utils.RECORD['shareProperties']['source'])
assert(queryset[0].source == old_source)
new_record = copy.deepcopy(utils.RECORD)
new_record['shareProperties']['source'] = 'wwe_news'
# test_info = copy.deepcopy(registry['test'])
test_info = registry['test'].__class__()
test_info.short_name = 'wwe_news'
registry['wwe_news'] = test_info
tasks.migrate(rename, source=old_source, target='wwe_news')
queryset = DocumentModel.objects(docID=RAW['docID'], source='wwe_news')
assert(queryset[0].source == 'wwe_news')
assert (len(queryset) == 1)
scrapi.processing.elasticsearch.es = real_es
|
<commit_before><commit_msg>Add migration test for rename<commit_after>
|
import copy
import pytest
import mock
import scrapi
from scrapi.linter.document import NormalizedDocument, RawDocument
from scrapi import tasks
from scrapi.migrations import rename
from scrapi import registry
# Need to force cassandra to ignore set keyspace
from scrapi.processing.cassandra import CassandraProcessor, DocumentModel
from . import utils
test_cass = CassandraProcessor()
# test_elastic = ElasticsearchProcessor()
harvester = utils.TestHarvester()
NORMALIZED = NormalizedDocument(utils.RECORD)
RAW = harvester.harvest()[0]
@pytest.fixture
def harvester():
pass # Need to override this
@pytest.mark.cassandra
def test_rename():
real_es = scrapi.processing.elasticsearch.es
scrapi.processing.elasticsearch.es = mock.MagicMock()
test_cass.process_raw(RAW)
test_cass.process_normalized(RAW, NORMALIZED)
# test_elastic.process_raw(RAW)
# test_elastic.process_normalized(RAW, NORMALIZED, index='test')
queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
old_source = NORMALIZED['shareProperties']['source']
assert(queryset[0].source == utils.RECORD['shareProperties']['source'])
assert(queryset[0].source == old_source)
new_record = copy.deepcopy(utils.RECORD)
new_record['shareProperties']['source'] = 'wwe_news'
# test_info = copy.deepcopy(registry['test'])
test_info = registry['test'].__class__()
test_info.short_name = 'wwe_news'
registry['wwe_news'] = test_info
tasks.migrate(rename, source=old_source, target='wwe_news')
queryset = DocumentModel.objects(docID=RAW['docID'], source='wwe_news')
assert(queryset[0].source == 'wwe_news')
assert (len(queryset) == 1)
scrapi.processing.elasticsearch.es = real_es
|
Add migration test for renameimport copy
import pytest
import mock
import scrapi
from scrapi.linter.document import NormalizedDocument, RawDocument
from scrapi import tasks
from scrapi.migrations import rename
from scrapi import registry
# Need to force cassandra to ignore set keyspace
from scrapi.processing.cassandra import CassandraProcessor, DocumentModel
from . import utils
test_cass = CassandraProcessor()
# test_elastic = ElasticsearchProcessor()
harvester = utils.TestHarvester()
NORMALIZED = NormalizedDocument(utils.RECORD)
RAW = harvester.harvest()[0]
@pytest.fixture
def harvester():
pass # Need to override this
@pytest.mark.cassandra
def test_rename():
real_es = scrapi.processing.elasticsearch.es
scrapi.processing.elasticsearch.es = mock.MagicMock()
test_cass.process_raw(RAW)
test_cass.process_normalized(RAW, NORMALIZED)
# test_elastic.process_raw(RAW)
# test_elastic.process_normalized(RAW, NORMALIZED, index='test')
queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
old_source = NORMALIZED['shareProperties']['source']
assert(queryset[0].source == utils.RECORD['shareProperties']['source'])
assert(queryset[0].source == old_source)
new_record = copy.deepcopy(utils.RECORD)
new_record['shareProperties']['source'] = 'wwe_news'
# test_info = copy.deepcopy(registry['test'])
test_info = registry['test'].__class__()
test_info.short_name = 'wwe_news'
registry['wwe_news'] = test_info
tasks.migrate(rename, source=old_source, target='wwe_news')
queryset = DocumentModel.objects(docID=RAW['docID'], source='wwe_news')
assert(queryset[0].source == 'wwe_news')
assert (len(queryset) == 1)
scrapi.processing.elasticsearch.es = real_es
|
<commit_before><commit_msg>Add migration test for rename<commit_after>import copy
import pytest
import mock
import scrapi
from scrapi.linter.document import NormalizedDocument, RawDocument
from scrapi import tasks
from scrapi.migrations import rename
from scrapi import registry
# Need to force cassandra to ignore set keyspace
from scrapi.processing.cassandra import CassandraProcessor, DocumentModel
from . import utils
test_cass = CassandraProcessor()
# test_elastic = ElasticsearchProcessor()
harvester = utils.TestHarvester()
NORMALIZED = NormalizedDocument(utils.RECORD)
RAW = harvester.harvest()[0]
@pytest.fixture
def harvester():
pass # Need to override this
@pytest.mark.cassandra
def test_rename():
real_es = scrapi.processing.elasticsearch.es
scrapi.processing.elasticsearch.es = mock.MagicMock()
test_cass.process_raw(RAW)
test_cass.process_normalized(RAW, NORMALIZED)
# test_elastic.process_raw(RAW)
# test_elastic.process_normalized(RAW, NORMALIZED, index='test')
queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
old_source = NORMALIZED['shareProperties']['source']
assert(queryset[0].source == utils.RECORD['shareProperties']['source'])
assert(queryset[0].source == old_source)
new_record = copy.deepcopy(utils.RECORD)
new_record['shareProperties']['source'] = 'wwe_news'
# test_info = copy.deepcopy(registry['test'])
test_info = registry['test'].__class__()
test_info.short_name = 'wwe_news'
registry['wwe_news'] = test_info
tasks.migrate(rename, source=old_source, target='wwe_news')
queryset = DocumentModel.objects(docID=RAW['docID'], source='wwe_news')
assert(queryset[0].source == 'wwe_news')
assert (len(queryset) == 1)
scrapi.processing.elasticsearch.es = real_es
|
|
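The test above stubs out the Elasticsearch client by saving scrapi.processing.elasticsearch.es, assigning a MagicMock, and restoring the original by hand at the end. A hedged sketch of the same stub-and-restore idea using mock.patch.object, which restores the attribute automatically even when the test body raises; FakeModule stands in for the real module and is not part of scrapi.
from unittest import mock
class FakeModule:
    es = 'real-client'   # stands in for scrapi.processing.elasticsearch
def run_with_stub(module):
    # While the context is open, module.es is a MagicMock that records calls.
    with mock.patch.object(module, 'es', mock.MagicMock()) as fake_es:
        fake_es.index(doc={'id': 1})
        fake_es.index.assert_called_once()
    # On exit the original attribute is back, even if an assertion failed.
if __name__ == '__main__':
    run_with_stub(FakeModule)
    assert FakeModule.es == 'real-client'
    print('attribute restored:', FakeModule.es)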
1397791e663f57927524ccf7a25915e8059d7376
|
tests/provider/test_article_structure.py
|
tests/provider/test_article_structure.py
|
import unittest
from ddt import ddt, data, unpack
from provider.article_structure import ArticleInfo
@ddt
class TestArticleStructure(unittest.TestCase):
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected':'2012-10-15T00:00:00Z'})
def test_get_update_date_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_update_date_from_zip_filename()
self.assertEqual(result, expected)
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected': '1'})
def test_get_version_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_version_from_zip_filename()
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
Test article structure replacing lettuce tests
|
Test article structure replacing lettuce tests
|
Python
|
mit
|
gnott/elife-bot,jhroot/elife-bot,jhroot/elife-bot,gnott/elife-bot,gnott/elife-bot,jhroot/elife-bot
|
Test article structure replacing lettuce tests
|
import unittest
from ddt import ddt, data, unpack
from provider.article_structure import ArticleInfo
@ddt
class TestArticleStructure(unittest.TestCase):
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected':'2012-10-15T00:00:00Z'})
def test_get_update_date_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_update_date_from_zip_filename()
self.assertEqual(result, expected)
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected': '1'})
def test_get_version_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_version_from_zip_filename()
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test article structure replacing lettuce tests<commit_after>
|
import unittest
from ddt import ddt, data, unpack
from provider.article_structure import ArticleInfo
@ddt
class TestArticleStructure(unittest.TestCase):
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected':'2012-10-15T00:00:00Z'})
def test_get_update_date_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_update_date_from_zip_filename()
self.assertEqual(result, expected)
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected': '1'})
def test_get_version_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_version_from_zip_filename()
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
Test article structure replacing lettuce testsimport unittest
from ddt import ddt, data, unpack
from provider.article_structure import ArticleInfo
@ddt
class TestArticleStructure(unittest.TestCase):
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected':'2012-10-15T00:00:00Z'})
def test_get_update_date_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_update_date_from_zip_filename()
self.assertEqual(result, expected)
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected': '1'})
def test_get_version_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_version_from_zip_filename()
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test article structure replacing lettuce tests<commit_after>import unittest
from ddt import ddt, data, unpack
from provider.article_structure import ArticleInfo
@ddt
class TestArticleStructure(unittest.TestCase):
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected':'2012-10-15T00:00:00Z'})
def test_get_update_date_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_update_date_from_zip_filename()
self.assertEqual(result, expected)
@unpack
@data({'input': 'elife-07702-vor-r4.zip', 'expected': None},
{'input': 'elife-00013-vor-v1-20121015000000.zip', 'expected': '1'})
def test_get_version_from_zip_filename(self, input, expected):
self.articleinfo = ArticleInfo(input)
result = self.articleinfo.get_version_from_zip_filename()
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
|
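The record parameterizes its cases with the third-party ddt decorators. For comparison, here is a sketch of the same table-driven shape using only unittest.subTest from the standard library; the filename parser below is a simplified stand-in, not the real provider.article_structure logic.
import re
import unittest
def version_from_zip_filename(name):
    # Pull the version digit out of names like 'elife-00013-vor-v1-2012...zip'.
    m = re.search(r'-v(\d+)-', name)
    return m.group(1) if m else None
class TestVersionParsing(unittest.TestCase):
    CASES = [
        ('elife-07702-vor-r4.zip', None),
        ('elife-00013-vor-v1-20121015000000.zip', '1'),
    ]
    def test_cases(self):
        # Each row gets its own subTest so one failure does not hide the rest.
        for name, expected in self.CASES:
            with self.subTest(name=name):
                self.assertEqual(version_from_zip_filename(name), expected)
if __name__ == '__main__':
    unittest.main()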
9388754e53c0d2898a6b1bfbc64f3d5c68c1f8d1
|
scripts/box/connect_external_accounts.py
|
scripts/box/connect_external_accounts.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
First run
rm -rf website/addons/box/views/
Then change the user_settings field of BoxNodeSettings to foreign_user_settings
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.box.model import BoxNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in BoxNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
user = user_addon.owner
if not user_addon.external_accounts:
logger.warning('User {0} has no box external account'.format(user._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
Add script to connect ExternalAccounts and folders -after the pre-merge migrations have been done
|
Add script to connect ExternalAccounts and folders
-after the pre-merge migrations have been done
|
Python
|
apache-2.0
|
RomanZWang/osf.io,RomanZWang/osf.io,amyshi188/osf.io,GageGaskins/osf.io,baylee-d/osf.io,cwisecarver/osf.io,adlius/osf.io,leb2dg/osf.io,jnayak1/osf.io,erinspace/osf.io,acshi/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,wearpants/osf.io,zamattiac/osf.io,samchrisinger/osf.io,saradbowman/osf.io,GageGaskins/osf.io,baylee-d/osf.io,cwisecarver/osf.io,adlius/osf.io,leb2dg/osf.io,jnayak1/osf.io,erinspace/osf.io,acshi/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,wearpants/osf.io,zamattiac/osf.io,KAsante95/osf.io,leb2dg/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,haoyuchen1992/osf.io,laurenrevere/osf.io,cslzchen/osf.io,samanehsan/osf.io,zachjanicki/osf.io,sloria/osf.io,alexschiller/osf.io,zamattiac/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,kwierman/osf.io,cslzchen/osf.io,crcresearch/osf.io,jnayak1/osf.io,cwisecarver/osf.io,crcresearch/osf.io,Nesiehr/osf.io,samanehsan/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,mluo613/osf.io,acshi/osf.io,Johnetordoff/osf.io,cosenal/osf.io,njantrania/osf.io,cosenal/osf.io,doublebits/osf.io,kch8qx/osf.io,billyhunt/osf.io,caseyrygt/osf.io,cosenal/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,hmoco/osf.io,caseyrygt/osf.io,kwierman/osf.io,mfraezz/osf.io,samanehsan/osf.io,caseyrygt/osf.io,ticklemepierce/osf.io,baylee-d/osf.io,acshi/osf.io,KAsante95/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,billyhunt/osf.io,amyshi188/osf.io,zachjanicki/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,emetsger/osf.io,TomHeatwole/osf.io,baylee-d/osf.io,aaxelb/osf.io,doublebits/osf.io,amyshi188/osf.io,mluo613/osf.io,RomanZWang/osf.io,kwierman/osf.io,mluke93/osf.io,cwisecarver/osf.io,kch8qx/osf.io,SSJohns/osf.io,mattclark/osf.io,brandonPurvis/osf.io,adlius/osf.io,brandonPurvis/osf.io,sloria/osf.io,cosenal/osf.io,Ghalko/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,leb2dg/osf.io,ticklemepierce/osf.io,haoyuchen1992/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,Ghalko/osf.io,binoculars/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,ZobairAlijan/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,wearpants/osf.io,rdhyee/osf.io,mfraezz/osf.io,hmoco/osf.io,doublebits/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,chrisseto/osf.io,samanehsan/osf.io,erinspace/osf.io,chennan47/osf.io,icereval/osf.io,wearpants/osf.io,binoculars/osf.io,njantrania/osf.io,icereval/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,Ghalko/osf.io,kch8qx/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,KAsante95/osf.io,danielneis/osf.io,mfraezz/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,emetsger/osf.io,felliott/osf.io,saradbowman/osf.io,cwisecarver/osf.io,felliott/osf.io,TomHeatwole/osf.io,pattisdr/osf.io,SSJohns/osf.io,mluo613/osf.io,CenterForOpenScience/osf.io
|
Add script to connect ExternalAccounts and folders
-after the pre-merge migrations have been done
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
First run
rm -rf website/addons/box/views/
Then change the user_settings field of BoxNodeSettings to foreign_user_settings
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.box.model import BoxNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in BoxNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
user = user_addon.owner
if not user_addon.external_accounts:
logger.warning('User {0} has no box external account'.format(user._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
<commit_before><commit_msg>Add script to connect ExternalAccounts and folders
-after the pre-merge migrations have been done<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
First run
rm -rf website/addons/box/views/
Then change the user_settings field of BoxNodeSettings to foreign_user_settings
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.box.model import BoxNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in BoxNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
user = user_addon.owner
if not user_addon.external_accounts:
logger.warning('User {0} has no box external account'.format(user._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
Add script to connect ExternalAccounts and folders
-after the pre-merge migrations have been done#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
First run
rm -rf website/addons/box/views/
Then change the user_settings field of BoxNodeSettings to foreign_user_settings
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.box.model import BoxNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in BoxNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
user = user_addon.owner
if not user_addon.external_accounts:
logger.warning('User {0} has no box external account'.format(user._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
<commit_before><commit_msg>Add script to connect ExternalAccounts and folders
-after the pre-merge migrations have been done<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
First run
rm -rf website/addons/box/views/
Then change the user_settings field of BoxNodeSettings to foreign_user_settings
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.box.model import BoxNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in BoxNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
user = user_addon.owner
if not user_addon.external_accounts:
logger.warning('User {0} has no box external account'.format(user._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
|
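The migration script above gets its dry-run behaviour by doing the work inside a transaction and raising to force a rollback when 'dry' is passed. A database-free sketch of that shape, with an in-memory dict standing in for transactional state; the store, the migration body, and DryRunAbort are all invented for illustration.
import sys
class DryRunAbort(Exception):
    pass
def migrate(store):
    # The 'migration': rewrite every value in place.
    for key in list(store):
        store[key] = store[key].upper()
def main(dry=True):
    store = {'a': 'x', 'b': 'y'}
    snapshot = dict(store)            # stands in for the open transaction
    try:
        migrate(store)
        if dry:
            raise DryRunAbort('Abort Transaction - Dry Run')
    except DryRunAbort as exc:
        store.clear()
        store.update(snapshot)        # the 'rollback'
        print(exc)
    print('store after run:', store)
if __name__ == '__main__':
    main(dry='dry' in sys.argv)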
9e89e35d1071a2123c9fa8a28bf538f2e34daeeb
|
bumblebee/modules/cpu.py
|
bumblebee/modules/cpu.py
|
import bumblebee.module
import psutil
class Module(bumblebee.module.Module):
def __init__(self, args):
super(Module, self).__init__(args)
self._perc = psutil.cpu_percent(percpu=False)
def data(self):
self._perc = psutil.cpu_percent(percpu=False)
return "{:05.02f}%".format(self._perc)
def warning(self):
return self._perc > 70
def critical(self):
return self._perc > 80
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Add module for measuring CPU utilization
|
[modules] Add module for measuring CPU utilization
Add module "cpu", which uses psutil to measure CPU utilization between
two consecutive calls.
|
Python
|
mit
|
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
|
[modules] Add module for measuring CPU utilization
Add module "cpu", which uses psutil to measure CPU utilization between
two consecutive calls.
|
import bumblebee.module
import psutil
class Module(bumblebee.module.Module):
def __init__(self, args):
super(Module, self).__init__(args)
self._perc = psutil.cpu_percent(percpu=False)
def data(self):
self._perc = psutil.cpu_percent(percpu=False)
return "{:05.02f}%".format(self._perc)
def warning(self):
return self._perc > 70
def critical(self):
return self._perc > 80
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
<commit_before><commit_msg>[modules] Add module for measuring CPU utilization
Add module "cpu", which uses psutil to measure CPU utilization between
two consecutive calls.<commit_after>
|
import bumblebee.module
import psutil
class Module(bumblebee.module.Module):
def __init__(self, args):
super(Module, self).__init__(args)
self._perc = psutil.cpu_percent(percpu=False)
def data(self):
self._perc = psutil.cpu_percent(percpu=False)
return "{:05.02f}%".format(self._perc)
def warning(self):
return self._perc > 70
def critical(self):
return self._perc > 80
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
[modules] Add module for measuring CPU utilization
Add module "cpu", which uses psutil to measure CPU utilization between
two consecutive calls.import bumblebee.module
import psutil
class Module(bumblebee.module.Module):
def __init__(self, args):
super(Module, self).__init__(args)
self._perc = psutil.cpu_percent(percpu=False)
def data(self):
self._perc = psutil.cpu_percent(percpu=False)
return "{:05.02f}%".format(self._perc)
def warning(self):
return self._perc > 70
def critical(self):
return self._perc > 80
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
<commit_before><commit_msg>[modules] Add module for measuring CPU utilization
Add module "cpu", which uses psutil to measure CPU utilization between
two consecutive calls.<commit_after>import bumblebee.module
import psutil
class Module(bumblebee.module.Module):
def __init__(self, args):
super(Module, self).__init__(args)
self._perc = psutil.cpu_percent(percpu=False)
def data(self):
self._perc = psutil.cpu_percent(percpu=False)
return "{:05.02f}%".format(self._perc)
def warning(self):
return self._perc > 70
def critical(self):
return self._perc > 80
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
|
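One detail worth calling out in the module above: psutil.cpu_percent() with no interval argument reports utilization since the previous call, which is why __init__ makes a priming call before data() is ever read. A tiny standalone sketch of that call pattern (assumes psutil is installed):
import time
import psutil
psutil.cpu_percent(percpu=False)    # priming call; the first reading is meaningless
for _ in range(3):
    time.sleep(1)                   # let some activity accumulate
    pct = psutil.cpu_percent(percpu=False)
    print("{:05.02f}% busy since last call".format(pct))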
6e723142d28aafb0bb925cb3456fd962b679349b
|
app/applier.py
|
app/applier.py
|
import random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
'''Returns a sound change rule from rules applicable to the given word list.'''
filtered_rules = filter_rules_by_phonemes(words, rules)
filtered_rules = filter_rules_by_environments(words, filtered_rules)
# selected_rule = random.choice(filtered_rules)
def filter_rules_by_phonemes(words, rules):
'''Returns a list of rules which contain phonemes that are present in the given
word list.
'''
pass
def filter_rules_by_environments(words, rules):
'''Returns a list of rules which apply to at least one word in the given word
list, taking into account the environments in which the rule applies.
'''
pass
if __name__ == '__main__':
choose_rule(words, rules)
|
Create skeleton for new rule chooser
|
Create skeleton for new rule chooser
|
Python
|
mit
|
kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve
|
Create skeleton for new rule chooser
|
import random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
'''Returns a sound change rule from rules applicable to the given word list.'''
filtered_rules = filter_rules_by_phonemes(words, rules)
filtered_rules = filter_rules_by_environments(words, filtered_rules)
# selected_rule = random.choice(filtered_rules)
def filter_rules_by_phonemes(words, rules):
'''Returns a list of rules which contain phonemes that are present in the given
word list.
'''
pass
def filter_rules_by_environments(words, rules):
'''Returns a list of rules which apply to at least one word in the given word
list, taking into account the environments in which the rule applies.
'''
pass
if __name__ == '__main__':
choose_rule(words, rules)
|
<commit_before><commit_msg>Create skeleton for new rule chooser<commit_after>
|
import random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
'''Returns a sound change rule from rules applicable to the given word list.'''
filtered_rules = filter_rules_by_phonemes(words, rules)
filtered_rules = filter_rules_by_environments(words, filtered_rules)
# selected_rule = random.choice(filtered_rules)
def filter_rules_by_phonemes(words, rules):
'''Returns a list of rules which contain phonemes that are present in the given
word list.
'''
pass
def filter_rules_by_environments(words, rules):
'''Returns a list of rules which apply to at least one word in the given word
list, taking into account the environments in which the rule applies.
'''
pass
if __name__ == '__main__':
choose_rule(words, rules)
|
Create skeleton for new rule chooserimport random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
'''Returns a sound change rule from rules applicable to the given word list.'''
filtered_rules = filter_rules_by_phonemes(words, rules)
filtered_rules = filter_rules_by_environments(words, filtered_rules)
# selected_rule = random.choice(filtered_rules)
def filter_rules_by_phonemes(words, rules):
'''Returns a list of rules which contain phonemes that are present in the given
word list.
'''
pass
def filter_rules_by_environments(words, rules):
'''Returns a list of rules which apply to at least one word in the given word
list, taking into account the environments in which the rule applies.
'''
pass
if __name__ == '__main__':
choose_rule(words, rules)
|
<commit_before><commit_msg>Create skeleton for new rule chooser<commit_after>import random
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
sonorization = Rule({'p': 'b', 't': 'd', 'ʈ': 'ɖ', 'c':'ɟ', 'k': 'g', 'q': 'ɢ'},
['^.', 'V.V'])
rules = [sonorization]
words = ['potato', 'tobado', 'tabasco']
def choose_rule(words, rules):
'''Returns a sound change rule from rules applicable to the given word list.'''
filtered_rules = filter_rules_by_phonemes(words, rules)
filtered_rules = filter_rules_by_environments(words, filtered_rules)
# selected_rule = random.choice(filtered_rules)
def filter_rules_by_phonemes(words, rules):
'''Returns a list of rules which contain phonemes that are present in the given
word list.
'''
pass
def filter_rules_by_environments(words, rules):
'''Returns a list of rules which apply to at least one word in the given word
list, taking into account the environments in which the rule applies.
'''
pass
if __name__ == '__main__':
choose_rule(words, rules)
|
|
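The skeleton above leaves both filters as pass. One plausible reading of filter_rules_by_phonemes, kept deliberately small: keep a rule when any phoneme it would change occurs somewhere in the word list. This is my interpretation for illustration, not the project's eventual logic.
from collections import namedtuple
Rule = namedtuple('Rule', ['changes', 'environments'])
def filter_rules_by_phonemes(words, rules):
    # A rule is applicable if at least one of its source phonemes appears.
    text = ''.join(words)
    return [r for r in rules if any(p in text for p in r.changes)]
if __name__ == '__main__':
    sonorization = Rule({'p': 'b', 't': 'd'}, ['^.', 'V.V'])
    nasalization = Rule({'m': 'b'}, ['^.'])
    kept = filter_rules_by_phonemes(['potato', 'tobado'], [sonorization, nasalization])
    print(len(kept), 'rule(s) applicable')   # only sonorization survives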
e1c5891650eaf3934a082e52934a1c4dd113fee7
|
doc/quickstart/testlibs/LoginLibrary.py
|
doc/quickstart/testlibs/LoginLibrary.py
|
import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
|
import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
|
Use subprocess instead of popen to get Jython working too
|
Use subprocess instead of popen to get Jython working too
|
Python
|
apache-2.0
|
ChrisHirsch/robotframework,alexandrul-ci/robotframework,yonglehou/robotframework,JackNokia/robotframework,edbrannin/robotframework,jaloren/robotframework,un33k/robotframework,Colorfulstan/robotframework,kyle1986/robortframe,synsun/robotframework,userzimmermann/robotframework,userzimmermann/robotframework,un33k/robotframework,alexandrul-ci/robotframework,stasiek/robotframework,Colorfulstan/robotframework,kyle1986/robortframe,jaloren/robotframework,suvarnaraju/robotframework,wojciechtanski/robotframework,yahman72/robotframework,kyle1986/robortframe,synsun/robotframework,rwarren14/robotframework,dkentw/robotframework,eric-stanley/robotframework,edbrannin/robotframework,HelioGuilherme66/robotframework,JackNokia/robotframework,Colorfulstan/robotframework,stasiek/robotframework,SivagnanamCiena/robotframework,stasiek/robotframework,Colorfulstan/robotframework,jaloren/robotframework,jorik041/robotframework,xiaokeng/robotframework,rwarren14/robotframework,ChrisHirsch/robotframework,ashishdeshpande/robotframework,eric-stanley/robotframework,kyle1986/robortframe,kurtdawg24/robotframework,edbrannin/robotframework,moto-timo/robotframework,alexandrul-ci/robotframework,ashishdeshpande/robotframework,suvarnaraju/robotframework,snyderr/robotframework,eric-stanley/robotframework,joongh/robotframework,userzimmermann/robotframework,moto-timo/robotframework,ashishdeshpande/robotframework,joongh/robotframework,userzimmermann/robotframework,nmrao/robotframework,yahman72/robotframework,moto-timo/robotframework,SivagnanamCiena/robotframework,nmrao/robotframework,nmrao/robotframework,moto-timo/robotframework,edbrannin/robotframework,ChrisHirsch/robotframework,wojciechtanski/robotframework,jorik041/robotframework,yahman72/robotframework,ChrisHirsch/robotframework,jorik041/robotframework,stasiek/robotframework,xiaokeng/robotframework,snyderr/robotframework,SivagnanamCiena/robotframework,snyderr/robotframework,xiaokeng/robotframework,suvarnaraju/robotframework,yonglehou/robotframework,yonglehou/robotframework,kurtdawg24/robotframework,dkentw/robotframework,nmrao/robotframework,suvarnaraju/robotframework,synsun/robotframework,ashishdeshpande/robotframework,edbrannin/robotframework,jaloren/robotframework,un33k/robotframework,joongh/robotframework,wojciechtanski/robotframework,Colorfulstan/robotframework,yahman72/robotframework,moto-timo/robotframework,eric-stanley/robotframework,joongh/robotframework,stasiek/robotframework,alexandrul-ci/robotframework,alexandrul-ci/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework,wojciechtanski/robotframework,xiaokeng/robotframework,JackNokia/robotframework,un33k/robotframework,snyderr/robotframework,xiaokeng/robotframework,robotframework/robotframework,robotframework/robotframework,jorik041/robotframework,JackNokia/robotframework,yahman72/robotframework,snyderr/robotframework,kurtdawg24/robotframework,jaloren/robotframework,jaloren/robotframework,yonglehou/robotframework,JackNokia/robotframework,yonglehou/robotframework,synsun/robotframework,dkentw/robotframework,kyle1986/robortframe,SivagnanamCiena/robotframework,rwarren14/robotframework,jorik041/robotframework,kurtdawg24/robotframework,suvarnaraju/robotframework,dkentw/robotframework,dkentw/robotframework,SivagnanamCiena/robotframework,synsun/robotframework,HelioGuilherme66/robotframework,rwarren14/robotframework,ChrisHirsch/robotframework,rwarren14/robotframework,fingeronthebutton/robotframework,un33k/robotframework
|
import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
Use subprocess instead of popen to get Jython working too
|
import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
|
<commit_before>import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
<commit_msg>Use subprocess instead of popen to get Jython working too<commit_after>
|
import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
|
import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
Use subprocess instead of popen to get Jython working tooimport os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
|
<commit_before>import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
<commit_msg>Use subprocess isntead of popen to get Jython working too<commit_after>import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
|
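The change in the record above is the classic os.popen-to-subprocess migration: pass an argument list instead of a quoted command string, capture stdout and stderr through pipes, and read the result with communicate(). A self-contained sketch of that pattern, using the current interpreter as a stand-in for the login.py script under test:
import sys
import subprocess
def run(*args):
    # Build an argv list (no shell, no quoting problems) and collect output.
    child = 'print("status: " + " ".join(%r))' % (list(args),)
    proc = subprocess.Popen([sys.executable, '-c', child],
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    return proc.communicate()[0].decode().strip()
if __name__ == '__main__':
    print(run('login', 'alice', 'secret'))   # -> status: login alice secret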
72e743b0ee40609ce2f469b44afe82d1574c498a
|
bin/explore.py
|
bin/explore.py
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.dirname(__file__))
import random, support
import matplotlib.pyplot as pp
support.figure()
app_count = support.count_apps()
user_count = support.count_users()
while True:
while True:
app = None
user = random.randrange(user_count)
data = support.select_interarrivals(app=app, user=user)
if len(data) >= 10: break
pp.clf()
pp.plot(data)
pp.title('App {}, User {}, Samples {}'.format(app, user, len(data)))
pp.pause(1)
input()
|
Add a script for the exploration of app-user pairs
|
Add a script for the exploration of app-user pairs
|
Python
|
mit
|
learning-on-chip/google-cluster-prediction
|
Add a script for the exploration of app-user pairs
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.dirname(__file__))
import random, support
import matplotlib.pyplot as pp
support.figure()
app_count = support.count_apps()
user_count = support.count_users()
while True:
while True:
app = None
user = random.randrange(user_count)
data = support.select_interarrivals(app=app, user=user)
if len(data) >= 10: break
pp.clf()
pp.plot(data)
pp.title('App {}, User {}, Samples {}'.format(app, user, len(data)))
pp.pause(1)
input()
|
<commit_before><commit_msg>Add a script for the exploration of app-user pairs<commit_after>
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.dirname(__file__))
import random, support
import matplotlib.pyplot as pp
support.figure()
app_count = support.count_apps()
user_count = support.count_users()
while True:
while True:
app = None
user = random.randrange(user_count)
data = support.select_interarrivals(app=app, user=user)
if len(data) >= 10: break
pp.clf()
pp.plot(data)
pp.title('App {}, User {}, Samples {}'.format(app, user, len(data)))
pp.pause(1)
input()
|
Add a script for the exploration of app-user pairs#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.dirname(__file__))
import random, support
import matplotlib.pyplot as pp
support.figure()
app_count = support.count_apps()
user_count = support.count_users()
while True:
while True:
app = None
user = random.randrange(user_count)
data = support.select_interarrivals(app=app, user=user)
if len(data) >= 10: break
pp.clf()
pp.plot(data)
pp.title('App {}, User {}, Samples {}'.format(app, user, len(data)))
pp.pause(1)
input()
|
<commit_before><commit_msg>Add a script for the exploration of app-user pairs<commit_after>#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.dirname(__file__))
import random, support
import matplotlib.pyplot as pp
support.figure()
app_count = support.count_apps()
user_count = support.count_users()
while True:
while True:
app = None
user = random.randrange(user_count)
data = support.select_interarrivals(app=app, user=user)
if len(data) >= 10: break
pp.clf()
pp.plot(data)
pp.title('App {}, User {}, Samples {}'.format(app, user, len(data)))
pp.pause(1)
input()
|
|
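The exploration loop above leans on the clf/plot/pause cycle to redraw a single figure in place while waiting between samples. A self-contained sketch of that interactive-refresh pattern with synthetic data (needs matplotlib with an interactive backend; the database query behind support is replaced by random numbers):
import random
import matplotlib.pyplot as plt
plt.figure()
for step in range(5):
    data = [random.random() for _ in range(50)]   # stands in for the DB query
    plt.clf()                                     # wipe the previous frame
    plt.plot(data)
    plt.title('Sample {}, n={}'.format(step, len(data)))
    plt.pause(0.5)                                # redraw and yield to the GUI
plt.show()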
fb9a381f7a45e79b8c67f516c2e4d76c9a1d9553
|
sts/util/rpc_forker.py
|
sts/util/rpc_forker.py
|
from abc import *
import os
from SimpleXMLRPCServer import SimpleXMLRPCServer
import xmlrpclib
import sys
class Forker(object):
''' Easily fork a job and retrieve the results '''
__metaclass__ = ABCMeta
def __init__(self, ip='localhost', port=3370):
self.ip = ip
self.port = port
self.server = SimpleXMLRPCServer((ip, port), allow_none=True)
self.server.register_function(self.return_to_parent, "return_to_parent")
self.client_return = None
@abstractmethod
def fork(self, code_block):
''' Fork off a child process, and run code_block. Return the json_hash
sent by the client'''
pass
def _invoke_child(self, code_block):
parent_url = "http://" + str(self.ip) + ":" + str(self.port) + "/"
proxy = xmlrpclib.ServerProxy(parent_url, allow_none=True)
client_return = code_block()
proxy.return_to_parent(client_return)
sys.exit(0)
def return_to_parent(self, client_return):
''' Invoked by child process to return a value '''
print "client_return!:" % client_return
self.client_return = client_return
return None
class LocalForker(Forker):
def fork(self, code_block):
# TODO(cs): use subprocess instead to spawn baby snakes
pid = os.fork()
if pid == 0: # Child
self._invoke_child(code_block)
else: # Parent
self.server.handle_request()
return self.client_return
class RemoteForker(Forker):
def __init__(self, server_info_list):
''' cycles through server_info_list for each invocation of fork() '''
pass
def fork(self, code_block):
# Would need bidirectional communication between parent and child..
# (Need to send code_block to client)
pass
|
Add a forker class: spawn children and return results via XML-RPC
|
Add a forker class: spawn children and return results via XML-RPC
|
Python
|
apache-2.0
|
jmiserez/sts,ucb-sts/sts,ucb-sts/sts,jmiserez/sts
|
Add a forker class: spawn children and return results via XML-RPC
|
from abc import *
import os
from SimpleXMLRPCServer import SimpleXMLRPCServer
import xmlrpclib
import sys
class Forker(object):
''' Easily fork a job and retrieve the results '''
__metaclass__ = ABCMeta
def __init__(self, ip='localhost', port=3370):
self.ip = ip
self.port = port
self.server = SimpleXMLRPCServer((ip, port), allow_none=True)
self.server.register_function(self.return_to_parent, "return_to_parent")
self.client_return = None
@abstractmethod
def fork(self, code_block):
''' Fork off a child process, and run code_block. Return the json_hash
sent by the client'''
pass
def _invoke_child(self, code_block):
parent_url = "http://" + str(self.ip) + ":" + str(self.port) + "/"
proxy = xmlrpclib.ServerProxy(parent_url, allow_none=True)
client_return = code_block()
proxy.return_to_parent(client_return)
sys.exit(0)
def return_to_parent(self, client_return):
''' Invoked by child process to return a value '''
print "client_return!:" % client_return
self.client_return = client_return
return None
class LocalForker(Forker):
def fork(self, code_block):
# TODO(cs): use subprocess instead to spawn baby snakes
pid = os.fork()
if pid == 0: # Child
self._invoke_child(code_block)
else: # Parent
self.server.handle_request()
return self.client_return
class RemoteForker(Forker):
def __init__(self, server_info_list):
''' cycles through server_info_list for each invocation of fork() '''
pass
def fork(self, code_block):
# Would need bidirectional communication between parent and child..
# (Need to send code_block to client)
pass
|
<commit_before><commit_msg>Add a forker class: spawn children and return results via XML-RPC<commit_after>
|
from abc import *
import os
from SimpleXMLRPCServer import SimpleXMLRPCServer
import xmlrpclib
import sys
class Forker(object):
''' Easily fork a job and retrieve the results '''
__metaclass__ = ABCMeta
def __init__(self, ip='localhost', port=3370):
self.ip = ip
self.port = port
self.server = SimpleXMLRPCServer((ip, port), allow_none=True)
self.server.register_function(self.return_to_parent, "return_to_parent")
self.client_return = None
@abstractmethod
def fork(self, code_block):
''' Fork off a child process, and run code_block. Return the json_hash
sent by the client'''
pass
def _invoke_child(self, code_block):
parent_url = "http://" + str(self.ip) + ":" + str(self.port) + "/"
proxy = xmlrpclib.ServerProxy(parent_url, allow_none=True)
client_return = code_block()
proxy.return_to_parent(client_return)
sys.exit(0)
def return_to_parent(self, client_return):
''' Invoked by child process to return a value '''
print "client_return!:" % client_return
self.client_return = client_return
return None
class LocalForker(Forker):
def fork(self, code_block):
# TODO(cs): use subprocess instead to spawn baby snakes
pid = os.fork()
if pid == 0: # Child
self._invoke_child(code_block)
else: # Parent
self.server.handle_request()
return self.client_return
class RemoteForker(Forker):
def __init__(self, server_info_list):
''' cycles through server_info_list for each invocation of fork() '''
pass
def fork(self, code_block):
# Would need bidirectional communication between parent and child..
# (Need to send code_block to client)
pass
|
Add a forker class: spawn children and return results via XML-RPC
from abc import *
import os
from SimpleXMLRPCServer import SimpleXMLRPCServer
import xmlrpclib
import sys
class Forker(object):
''' Easily fork a job and retrieve the results '''
__metaclass__ = ABCMeta
def __init__(self, ip='localhost', port=3370):
self.ip = ip
self.port = port
self.server = SimpleXMLRPCServer((ip, port), allow_none=True)
self.server.register_function(self.return_to_parent, "return_to_parent")
self.client_return = None
@abstractmethod
def fork(self, code_block):
''' Fork off a child process, and run code_block. Return the json_hash
sent by the client'''
pass
def _invoke_child(self, code_block):
parent_url = "http://" + str(self.ip) + ":" + str(self.port) + "/"
proxy = xmlrpclib.ServerProxy(parent_url, allow_none=True)
client_return = code_block()
proxy.return_to_parent(client_return)
sys.exit(0)
def return_to_parent(self, client_return):
''' Invoked by child process to return a value '''
print "client_return!:" % client_return
self.client_return = client_return
return None
class LocalForker(Forker):
def fork(self, code_block):
# TODO(cs): use subprocess instead to spawn baby snakes
pid = os.fork()
if pid == 0: # Child
self._invoke_child(code_block)
else: # Parent
self.server.handle_request()
return self.client_return
class RemoteForker(Forker):
def __init__(self, server_info_list):
''' cycles through server_info_list for each invocation of fork() '''
pass
def fork(self, code_block):
# Would need bidirectional communication between parent and child..
# (Need to send code_block to client)
pass
|
<commit_before><commit_msg>Add a forker class: spawn children and return results via XML-RPC<commit_after>
from abc import *
import os
from SimpleXMLRPCServer import SimpleXMLRPCServer
import xmlrpclib
import sys
class Forker(object):
''' Easily fork a job and retrieve the results '''
__metaclass__ = ABCMeta
def __init__(self, ip='localhost', port=3370):
self.ip = ip
self.port = port
self.server = SimpleXMLRPCServer((ip, port), allow_none=True)
self.server.register_function(self.return_to_parent, "return_to_parent")
self.client_return = None
@abstractmethod
def fork(self, code_block):
''' Fork off a child process, and run code_block. Return the json_hash
sent by the client'''
pass
def _invoke_child(self, code_block):
parent_url = "http://" + str(self.ip) + ":" + str(self.port) + "/"
proxy = xmlrpclib.ServerProxy(parent_url, allow_none=True)
client_return = code_block()
proxy.return_to_parent(client_return)
sys.exit(0)
def return_to_parent(self, client_return):
''' Invoked by child process to return a value '''
print "client_return!:" % client_return
self.client_return = client_return
return None
class LocalForker(Forker):
def fork(self, code_block):
# TODO(cs): use subprocess instead to spawn baby snakes
pid = os.fork()
if pid == 0: # Child
self._invoke_child(code_block)
else: # Parent
self.server.handle_request()
return self.client_return
class RemoteForker(Forker):
def __init__(self, server_info_list):
''' cycles through server_info_list for each invocation of fork() '''
pass
def fork(self, code_block):
# Would need bidirectional communication between parent and child..
# (Need to send code_block to client)
pass
|
|
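The TODO in LocalForker points at spawning children via subprocess rather than os.fork. A minimal sketch of that direction, using the standard multiprocessing module, follows; it is not part of the recorded commit, the ProcessForker name is illustrative, and it assumes the POSIX 'fork' start method that os.fork already implies.
import multiprocessing

class ProcessForker(object):
    ''' Run a code block in a child process and collect its return value
    over a multiprocessing.Queue, avoiding the XML-RPC round trip. '''
    def fork(self, code_block):
        queue = multiprocessing.Queue()
        # The child executes the block and reports its result on the queue.
        child = multiprocessing.Process(
            target=lambda q: q.put(code_block()), args=(queue,))
        child.start()
        result = queue.get()  # blocks until the child reports back
        child.join()
        return result

if __name__ == '__main__':
    print(ProcessForker().fork(lambda: 6 * 7))  # prints 42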
48acf7dd057d17611dcafa279721a99434e99885
|
ou_noise.py
|
ou_noise.py
|
# --------------------------------------
# Reference: https://github.com/rllab/rllab/blob/master/rllab/exploration_strategies/ou_strategy.py
# --------------------------------------
import numpy as np
import numpy.random as nr
class OUNoise:
""" docstring for OUNoise """
def __init__(self,action_dimension,mu=0, theta=0.15, sigma=0.3):
self.action_dimension = action_dimension
self.mu = mu
self.theta = theta
self.sigma = sigma
self.state = np.ones(self.action_dimension) * self.mu
self.reset()
def reset(self):
self.state = np.ones(self.action_dimension) * self.mu
def noise(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * nr.randn(len(x))
self.state = x + dx
return self.state
if __name__ == '__main__':
ou = OUNoise(3)
states = []
for i in range(1000):
states.append(ou.noise())
import matplotlib.pyplot as plt
plt.plot(states)
plt.show()
|
Add OU Noise as given in ddpg paper
|
Add OU Noise as given in ddpg paper
|
Python
|
mit
|
stevenpjg/ddpg-aigym
|
Add OU Noise as given in ddpg paper
|
# --------------------------------------
# Reference: https://github.com/rllab/rllab/blob/master/rllab/exploration_strategies/ou_strategy.py
# --------------------------------------
import numpy as np
import numpy.random as nr
class OUNoise:
""" docstring for OUNoise """
def __init__(self,action_dimension,mu=0, theta=0.15, sigma=0.3):
self.action_dimension = action_dimension
self.mu = mu
self.theta = theta
self.sigma = sigma
self.state = np.ones(self.action_dimension) * self.mu
self.reset()
def reset(self):
self.state = np.ones(self.action_dimension) * self.mu
def noise(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * nr.randn(len(x))
self.state = x + dx
return self.state
if __name__ == '__main__':
ou = OUNoise(3)
states = []
for i in range(1000):
states.append(ou.noise())
import matplotlib.pyplot as plt
plt.plot(states)
plt.show()
|
<commit_before><commit_msg>Add OU Noise as given in ddpg paper<commit_after>
|
# --------------------------------------
# Reference: https://github.com/rllab/rllab/blob/master/rllab/exploration_strategies/ou_strategy.py
# --------------------------------------
import numpy as np
import numpy.random as nr
class OUNoise:
""" docstring for OUNoise """
def __init__(self,action_dimension,mu=0, theta=0.15, sigma=0.3):
self.action_dimension = action_dimension
self.mu = mu
self.theta = theta
self.sigma = sigma
self.state = np.ones(self.action_dimension) * self.mu
self.reset()
def reset(self):
self.state = np.ones(self.action_dimension) * self.mu
def noise(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * nr.randn(len(x))
self.state = x + dx
return self.state
if __name__ == '__main__':
ou = OUNoise(3)
states = []
for i in range(1000):
states.append(ou.noise())
import matplotlib.pyplot as plt
plt.plot(states)
plt.show()
|
Add OU Noise as given in ddpg paper# --------------------------------------
# Reference: https://github.com/rllab/rllab/blob/master/rllab/exploration_strategies/ou_strategy.py
# --------------------------------------
import numpy as np
import numpy.random as nr
class OUNoise:
""" docstring for OUNoise """
def __init__(self,action_dimension,mu=0, theta=0.15, sigma=0.3):
self.action_dimension = action_dimension
self.mu = mu
self.theta = theta
self.sigma = sigma
self.state = np.ones(self.action_dimension) * self.mu
self.reset()
def reset(self):
self.state = np.ones(self.action_dimension) * self.mu
def noise(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * nr.randn(len(x))
self.state = x + dx
return self.state
if __name__ == '__main__':
ou = OUNoise(3)
states = []
for i in range(1000):
states.append(ou.noise())
import matplotlib.pyplot as plt
plt.plot(states)
plt.show()
|
<commit_before><commit_msg>Add OU Noise as given in ddpg paper<commit_after># --------------------------------------
# Reference: https://github.com/rllab/rllab/blob/master/rllab/exploration_strategies/ou_strategy.py
# --------------------------------------
import numpy as np
import numpy.random as nr
class OUNoise:
""" docstring for OUNoise """
def __init__(self,action_dimension,mu=0, theta=0.15, sigma=0.3):
self.action_dimension = action_dimension
self.mu = mu
self.theta = theta
self.sigma = sigma
self.state = np.ones(self.action_dimension) * self.mu
self.reset()
def reset(self):
self.state = np.ones(self.action_dimension) * self.mu
def noise(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * nr.randn(len(x))
self.state = x + dx
return self.state
if __name__ == '__main__':
ou = OUNoise(3)
states = []
for i in range(1000):
states.append(ou.noise())
import matplotlib.pyplot as plt
plt.plot(states)
plt.show()
|
|
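The noise() update above discretises the Ornstein-Uhlenbeck process, dx = theta * (mu - x) + sigma * N(0, 1), which yields temporally correlated exploration noise. A hedged usage sketch follows; the policy function, state shape, and action bounds are illustrative assumptions, not taken from the commit.
import numpy as np

from ou_noise import OUNoise  # the class defined above

def policy(state):
    # Hypothetical deterministic policy for a 3-dimensional action space.
    return np.tanh(state[:3])

ou = OUNoise(3)
state = np.zeros(10)
for step in range(5):
    # Perturb the deterministic action and keep it inside the valid range.
    action = np.clip(policy(state) + ou.noise(), -1.0, 1.0)
    print(step, action)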
3c5c28f598738be2457948aa8872196604496c73
|
custom-scripts/long-term-formula.py
|
custom-scripts/long-term-formula.py
|
#! /usr/bin/python3
import argparse
def generate_rows(lower, upper):
assert (lower <= upper), "Lower limit can not be larger than upper"
for i in range(lower, upper):
print("=VLOOKUP(C{0},MFPortfolio!G52:MFPortfolio!H72,2,FALSE)".format(i))
def main():
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('upper', type=int, help='Upper limit to get rows')
parser.add_argument('--lower', type=int, default=3, help='Lower limit, defaults to 3')
args = parser.parse_args()
generate_rows(args.lower, args.upper)
main()
|
Add script to generate sheets formula for personal usage
|
Add script to generate sheets formula for personal usage
|
Python
|
mit
|
ayushgoel/dotfiles,ayushgoel/dotfiles,ayushgoel/dotfiles
|
Add script to generate sheets formula for personal usage
|
#! /usr/bin/python3
import argparse
def generate_rows(lower, upper):
assert (lower <= upper), "Lower limit can not be larger than upper"
for i in range(lower, upper):
print("=VLOOKUP(C{0},MFPortfolio!G52:MFPortfolio!H72,2,FALSE)".format(i))
def main():
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('upper', type=int, help='Upper limit to get rows')
parser.add_argument('--lower', type=int, default=3, help='Lower limit, defaults to 3')
args = parser.parse_args()
generate_rows(args.lower, args.upper)
main()
|
<commit_before><commit_msg>Add script to generate sheets formula for personal usage<commit_after>
|
#! /usr/bin/python3
import argparse
def generate_rows(lower, upper):
assert (lower <= upper), "Lower limit can not be larger than upper"
for i in range(lower, upper):
print("=VLOOKUP(C{0},MFPortfolio!G52:MFPortfolio!H72,2,FALSE)".format(i))
def main():
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('upper', type=int, help='Upper limit to get rows')
parser.add_argument('--lower', type=int, default=3, help='Lower limit, defaults to 3')
args = parser.parse_args()
generate_rows(args.lower, args.upper)
main()
|
Add script to generate sheets formula for personal usage#! /usr/bin/python3
import argparse
def generate_rows(lower, upper):
assert (lower <= upper), "Lower limit can not be larger than upper"
for i in range(lower, upper):
print("=VLOOKUP(C{0},MFPortfolio!G52:MFPortfolio!H72,2,FALSE)".format(i))
def main():
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('upper', type=int, help='Upper limit to get rows')
parser.add_argument('--lower', type=int, default=3, help='Lower limit, defaults to 3')
args = parser.parse_args()
generate_rows(args.lower, args.upper)
main()
|
<commit_before><commit_msg>Add script to generate sheets formula for personal usage<commit_after>#! /usr/bin/python3
import argparse
def generate_rows(lower, upper):
assert (lower <= upper), "Lower limit can not be larger than upper"
for i in range(lower, upper):
print("=VLOOKUP(C{0},MFPortfolio!G52:MFPortfolio!H72,2,FALSE)".format(i))
def main():
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('upper', type=int, help='Upper limit to get rows')
parser.add_argument('--lower', type=int, default=3, help='Lower limit, defaults to 3')
args = parser.parse_args()
generate_rows(args.lower, args.upper)
main()
|
|
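The script prints formulas for manual pasting; a variant that writes them directly into a workbook with openpyxl is sketched below. The output path, target column, and row range are assumptions for illustration, not part of the commit.
#! /usr/bin/python3
from openpyxl import Workbook

def write_rows(lower, upper, path='portfolio.xlsx'):
    assert lower <= upper, "Lower limit can not be larger than upper"
    wb = Workbook()
    ws = wb.active
    for i in range(lower, upper):
        # Same VLOOKUP as the print-based script, placed in column D.
        ws['D{0}'.format(i)] = (
            "=VLOOKUP(C{0},MFPortfolio!G52:MFPortfolio!H72,2,FALSE)".format(i))
    wb.save(path)

write_rows(3, 10)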
7b2c418cbe6291afa4d339278d549dda162b382e
|
tests/test_version_parser.py
|
tests/test_version_parser.py
|
import pytest
from leak.version_parser import versions_split
def test_versions_split():
pass
def test_wrong_versions_split():
# too many dots
assert versions_split('1.2.3.4') == [0, 0, 0]
# test missing numeric version
with pytest.raises(ValueError):
versions_split('not.numeric')
# test not string provided
with pytest.raises(AttributeError):
versions_split(12345)
|
Add some dummy test for negative cases when version splitting
|
Add some dummy test for negative cases when version splitting
|
Python
|
mit
|
bmwant21/leak
|
Add some dummy test for negative cases when version splitting
|
import pytest
from leak.version_parser import versions_split
def test_versions_split():
pass
def test_wrong_versions_split():
# too many dots
assert versions_split('1.2.3.4') == [0, 0, 0]
# test missing numeric version
with pytest.raises(ValueError):
versions_split('not.numeric')
# test not string provided
with pytest.raises(AttributeError):
versions_split(12345)
|
<commit_before><commit_msg>Add some dummy test for negative cases when version splitting<commit_after>
|
import pytest
from leak.version_parser import versions_split
def test_versions_split():
pass
def test_wrong_versions_split():
# too many dots
assert versions_split('1.2.3.4') == [0, 0, 0]
# test missing numeric version
with pytest.raises(ValueError):
versions_split('not.numeric')
# test not string provided
with pytest.raises(AttributeError):
versions_split(12345)
|
Add some dummy test for negative cases when version splittingimport pytest
from leak.version_parser import versions_split
def test_versions_split():
pass
def test_wrong_versions_split():
# too many dots
assert versions_split('1.2.3.4') == [0, 0, 0]
# test missing numeric version
with pytest.raises(ValueError):
versions_split('not.numeric')
# test not string provided
with pytest.raises(AttributeError):
versions_split(12345)
|
<commit_before><commit_msg>Add some dummy test for negative cases when version splitting<commit_after>import pytest
from leak.version_parser import versions_split
def test_versions_split():
pass
def test_wrong_versions_split():
# too many dots
assert versions_split('1.2.3.4') == [0, 0, 0]
# test missing numeric version
with pytest.raises(ValueError):
versions_split('not.numeric')
# test not string provided
with pytest.raises(AttributeError):
versions_split(12345)
|
|
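leak.version_parser itself is not reproduced in this record; one implementation consistent with the assertions above (a sketch, not necessarily the project's actual code) is:
def versions_split(version):
    parts = version.split('.')  # non-strings raise AttributeError here
    if len(parts) > 3:
        return [0, 0, 0]  # too many dots: fall back to zeros
    return [int(part) for part in parts]  # non-numeric parts raise ValueError

assert versions_split('1.2.3') == [1, 2, 3]
assert versions_split('1.2.3.4') == [0, 0, 0]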
48944ae9625cf36457b053508eff1e189c64f2c4
|
libraries/verification.py
|
libraries/verification.py
|
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = eval(model).objects.get(url = key)
except eval(model).DoesNotExist as error:
raise OkupyException('URL not found')
return result
|
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = model.objects.get(url = key)
except model.DoesNotExist:
raise OkupyException('URL not found')
return result
|
Fix bug in checkConfirmationKey, call the model directly instead of eval(arg)
|
Fix bug in checkConfirmationKey, call the model directly instead of
eval(arg)
|
Python
|
agpl-3.0
|
gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,dastergon/identity.gentoo.org,gentoo/identity.gentoo.org
|
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = eval(model).objects.get(url = key)
except eval(model).DoesNotExist as error:
raise OkupyException('URL not found')
return result
Fix bug in checkConfirmationKey, call the model directly instead of
eval(arg)
|
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = model.objects.get(url = key)
except model.DoesNotExist:
raise OkupyException('URL not found')
return result
|
<commit_before>from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = eval(model).objects.get(url = key)
except eval(model).DoesNotExist as error:
raise OkupyException('URL not found')
return result
<commit_msg>Fix bug in checkConfirmationKey, call the model directly instead of
eval(arg)<commit_after>
|
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = model.objects.get(url = key)
except model.DoesNotExist:
raise OkupyException('URL not found')
return result
|
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = eval(model).objects.get(url = key)
except eval(model).DoesNotExist as error:
raise OkupyException('URL not found')
return result
Fix bug in checkConfirmationKey, call the model directly instead of
eval(arg)from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = model.objects.get(url = key)
except model.DoesNotExist:
raise OkupyException('URL not found')
return result
|
<commit_before>from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = eval(model).objects.get(url = key)
except eval(model).DoesNotExist as error:
raise OkupyException('URL not found')
return result
<commit_msg>Fix bug in checkConfirmationKey, call the model directly instead of
eval(arg)<commit_after>from django.contrib.sites.models import Site
from django.core.mail import send_mail
from okupy.libraries.encryption import random_string
from okupy.libraries.exception import OkupyException, log_extra_data
import logging
logger = logging.getLogger('okupy')
def sendConfirmationEmail(request, form, model):
'''
Create a random URL, add it to the appropriate table along with
the user data, and send an email to the user to confirm his email address
'''
random_url = random_string(30)
inactive_email = model(email = form.cleaned_data['email'],
user = form.cleaned_data['username'],
url = random_url)
try:
inactive_email.save()
except Exception as error:
logger.error(error, extra = log_extra_data(request, form))
raise OkupyException('Could not save to DB')
send_mail('[%s]: Please confirm your email address' % Site.objects.get_current().name or None,
'To confirm your email address, please click <a href="/%s">here</a>' % random_url,
'admin@tampakrap.gr',
[form.cleaned_data['email']])
def checkConfirmationKey(key, model):
'''
Check if the URL matches any of the confirmation keys in the database
'''
try:
result = model.objects.get(url = key)
except model.DoesNotExist:
raise OkupyException('URL not found')
return result
|
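Besides passing the model class directly, Django's app registry covers the string-keyed case that the removed eval() call was approximating. A hedged sketch, assuming Django 1.7+ where django.apps is available, with a hypothetical label:
from django.apps import apps

def resolve_model(label):
    # 'app_label.ModelName' -> model class, with no eval() involved.
    return apps.get_model(label)

# Usage sketch; the dotted label below is hypothetical.
# InactiveEmail = resolve_model('accounts.InactiveEmail')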
6a91b842626d531f7242994c91bb496f5b90d6c7
|
web/ctf_gameserver/web/scoring/forms.py
|
web/ctf_gameserver/web/scoring/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from . import models
class GameControlAdminForm(forms.ModelForm):
"""
Form for the GameControl object, designed to be used in GameControlAdmin.
"""
class Meta:
model = models.GameControl
fields = '__all__'
help_texts = {
'tick_duration': _('Duration of one tick in seconds'),
'valid_ticks': _('Number of ticks a flag is valid for')
}
|
Add missing file, which actually belongs to last commit
|
Add missing file, which actually belongs to last commit
|
Python
|
isc
|
fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
|
Add missing file, which actually belongs to last commit
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from . import models
class GameControlAdminForm(forms.ModelForm):
"""
Form for the GameControl object, designed to be used in GameControlAdmin.
"""
class Meta:
model = models.GameControl
fields = '__all__'
help_texts = {
'tick_duration': _('Duration of one tick in seconds'),
'valid_ticks': _('Number of ticks a flag is valid for')
}
|
<commit_before><commit_msg>Add missing file, which actually belongs to last commit<commit_after>
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from . import models
class GameControlAdminForm(forms.ModelForm):
"""
Form for the GameControl object, designed to be used in GameControlAdmin.
"""
class Meta:
model = models.GameControl
fields = '__all__'
help_texts = {
'tick_duration': _('Duration of one tick in seconds'),
'valid_ticks': _('Number of ticks a flag is valid for')
}
|
Add missing file, which actually belongs to last commitfrom django import forms
from django.utils.translation import ugettext_lazy as _
from . import models
class GameControlAdminForm(forms.ModelForm):
"""
Form for the GameControl object, designed to be used in GameControlAdmin.
"""
class Meta:
model = models.GameControl
fields = '__all__'
help_texts = {
'tick_duration': _('Duration of one tick in seconds'),
'valid_ticks': _('Number of ticks a flag is valid for')
}
|
<commit_before><commit_msg>Add missing file, which actually belongs to last commit<commit_after>from django import forms
from django.utils.translation import ugettext_lazy as _
from . import models
class GameControlAdminForm(forms.ModelForm):
"""
Form for the GameControl object, designed to be used in GameControlAdmin.
"""
class Meta:
model = models.GameControl
fields = '__all__'
help_texts = {
'tick_duration': _('Duration of one tick in seconds'),
'valid_ticks': _('Number of ticks a flag is valid for')
}
|
|
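The form is designed to be attached to the GameControlAdmin named in its docstring; that admin class is not part of this record, so the wiring below is a sketch with assumed module paths:
from django.contrib import admin

from . import models
from .forms import GameControlAdminForm

@admin.register(models.GameControl)
class GameControlAdmin(admin.ModelAdmin):
    form = GameControlAdminForm  # pulls in the help_texts defined above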
e2e609aec055a6e81a0ca7cb747809081ade81b1
|
designate/backend/impl_powerdns/migrate_repo/versions/010_records_add_disabled_column.py
|
designate/backend/impl_powerdns/migrate_repo/versions/010_records_add_disabled_column.py
|
# Copyright 2015 NetEase, Inc.
#
# Author: Zhang Gengyuan <stanzgy@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.dialects.mysql import TINYINT
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
disabled = Column('disabled', TINYINT(1), server_default='0')
disabled.create(records_table)
def downgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
records_table.c.disabled.drop()
|
Fix powerdns db table records missing column disabled
|
Fix powerdns db table records missing column disabled
Fix powerdns db table 'records' missing column 'disabled', which makes powerdns
service unable to start.
Change-Id: I3b4c056ca68c44b7e42d20faba555471171b661f
Closes-Bug: 1409629
|
Python
|
apache-2.0
|
cneill/designate,ionrock/designate,kiall/designate-py3,ionrock/designate,muraliselva10/designate,openstack/designate,kiall/designate-py3,ramsateesh/designate,muraliselva10/designate,cneill/designate-testing,grahamhayes/designate,kiall/designate-py3,muraliselva10/designate,openstack/designate,cneill/designate-testing,cneill/designate,grahamhayes/designate,ionrock/designate,tonyli71/designate,cneill/designate,cneill/designate,tonyli71/designate,ramsateesh/designate,tonyli71/designate,grahamhayes/designate,kiall/designate-py3,cneill/designate-testing,ramsateesh/designate,cneill/designate,kiall/designate-py3,openstack/designate
|
Fix powerdns db table records missing column disabled
Fix powerdns db table 'records' missing column 'disabled', which makes powerdns
service unable to start.
Change-Id: I3b4c056ca68c44b7e42d20faba555471171b661f
Closes-Bug: 1409629
|
# Copyright 2015 NetEase, Inc.
#
# Author: Zhang Gengyuan <stanzgy@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.dialects.mysql import TINYINT
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
disabled = Column('disabled', TINYINT(1), server_default='0')
disabled.create(records_table)
def downgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
records_table.c.disabled.drop()
|
<commit_before><commit_msg>Fix powerdns db table records missing column disabled
Fix powerdns db table 'records' missing column 'disabled', which makes powerdns
service unable to start.
Change-Id: I3b4c056ca68c44b7e42d20faba555471171b661f
Closes-Bug: 1409629<commit_after>
|
# Copyright 2015 NetEase, Inc.
#
# Author: Zhang Gengyuan <stanzgy@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.dialects.mysql import TINYINT
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
disabled = Column('disabled', TINYINT(1), server_default='0')
disabled.create(records_table)
def downgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
records_table.c.disabled.drop()
|
Fix powerdns db table records missing column disabled
Fix powerdns db table 'records' missing column 'disabled', which makes powerdns
service unable to start.
Change-Id: I3b4c056ca68c44b7e42d20faba555471171b661f
Closes-Bug: 1409629# Copyright 2015 NetEase, Inc.
#
# Author: Zhang Gengyuan <stanzgy@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.dialects.mysql import TINYINT
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
disabled = Column('disabled', TINYINT(1), server_default='0')
disabled.create(records_table)
def downgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
records_table.c.disabled.drop()
|
<commit_before><commit_msg>Fix powerdns db table records missing column disabled
Fix powerdns db table 'records' missing column 'disabled', which makes powerdns
service unable to start.
Change-Id: I3b4c056ca68c44b7e42d20faba555471171b661f
Closes-Bug: 1409629<commit_after># Copyright 2015 NetEase, Inc.
#
# Author: Zhang Gengyuan <stanzgy@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.dialects.mysql import TINYINT
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
disabled = Column('disabled', TINYINT(1), server_default='0')
disabled.create(records_table)
def downgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
records_table.c.disabled.drop()
|
|
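The migration uses sqlalchemy-migrate, which was Designate's tooling at the time. For comparison only, the same column change expressed with Alembic would look roughly like this sketch:
import sqlalchemy as sa
from alembic import op

def upgrade():
    # sa.Boolean maps to TINYINT(1) on MySQL, matching the original column.
    op.add_column(
        'records', sa.Column('disabled', sa.Boolean(), server_default='0'))

def downgrade():
    op.drop_column('records', 'disabled')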
89597c9a277cb092e2a61450efad3d95574ec3b1
|
scripts/password_confirmation_reminders.py
|
scripts/password_confirmation_reminders.py
|
'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=True).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
|
'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=False).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
|
Fix bug in email confirmation scripts.
|
Fix bug in email confirmation scripts.
|
Python
|
mpl-2.0
|
seanastephens/communityshare,seanastephens/communityshare,seanastephens/communityshare,communityshare/communityshare,communityshare/communityshare,seanastephens/communityshare,communityshare/communityshare,communityshare/communityshare
|
'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=True).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
Fix bug in email confirmation scripts.
|
'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=False).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
|
<commit_before>'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=True).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
<commit_msg>Fix bug in email confirmation scripts.<commit_after>
|
'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=False).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
|
'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=True).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
Fix bug in email confirmation scripts.'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=False).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
|
<commit_before>'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=True).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
<commit_msg>Fix bug in email confirmation scripts.<commit_after>'''
Send an email to users who haven't confirmed their passwords asking them to confirm.
'''
import logging
from community_share.models.user import User
from community_share.models.share import Event
from community_share.models.conversation import Conversation
from community_share import mail_actions, config, store
def send_reminders():
users = store.session.query(User).filter_by(
active=True, email_confirmed=False).all()
for user in users:
template = mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE
subject = 'Please confirm email address.'
mail_actions.request_signup_email_confirmation(
user, template=template, subject=subject)
if __name__ == '__main__':
config.load_from_environment()
logger = logging.getLogger(__name__)
send_reminders()
|
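A hedged variant of the fixed script that paces outgoing mail is sketched below; the pause_seconds parameter and the function name are illustrative additions, while the query and mail call mirror the record:
import time

from community_share import config, mail_actions, store
from community_share.models.user import User

def send_reminders_throttled(pause_seconds=1.0):
    users = store.session.query(User).filter_by(
        active=True, email_confirmed=False).all()
    for user in users:
        mail_actions.request_signup_email_confirmation(
            user,
            template=mail_actions.CONFIRM_EMAIL_REMINDER_TEMPLATE,
            subject='Please confirm email address.')
        time.sleep(pause_seconds)  # crude throttle between sends

if __name__ == '__main__':
    config.load_from_environment()
    send_reminders_throttled()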
32ceedf7be64bde3dfd5804e87d7337c566a28e5
|
OIPA/api/activity/tests/test_filters.py
|
OIPA/api/activity/tests/test_filters.py
|
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from iati.factory.iati_factory import ActivityFactory, OtherIdentifierFactory
class ActivityFiltersTestCase(APITestCase):
def setUp(self):
self.first_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-201NEP1000'
)
self.first_other_identifier = OtherIdentifierFactory(
activity=self.first_activity,
identifier='AAA'
)
def test_other_identifier_filter_single_identifier(self):
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': self.first_other_identifier.identifier,
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 1)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
def test_other_identifier_filter_multiple_identifiers(self):
# Let's create more activities:
self.second_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-170GLO4038'
)
self.second_other_identifier = OtherIdentifierFactory(
activity=self.second_activity,
identifier='BBB'
)
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': '%s,%s' % (
self.first_other_identifier.identifier,
self.second_other_identifier.identifier
)
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 2)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
self.assertEquals(
response.data['results'][1]['iati_identifier'],
self.second_activity.iati_identifier
)
|
Add tests for new 'other_identifier' filter
|
Add tests for new 'other_identifier' filter
|
Python
|
agpl-3.0
|
openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA
|
Add tests for new 'other_identifier' filter
|
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from iati.factory.iati_factory import ActivityFactory, OtherIdentifierFactory
class ActivityFiltersTestCase(APITestCase):
def setUp(self):
self.first_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-201NEP1000'
)
self.first_other_identifier = OtherIdentifierFactory(
activity=self.first_activity,
identifier='AAA'
)
def test_other_identifier_filter_single_identifier(self):
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': self.first_other_identifier.identifier,
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 1)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
def test_other_identifier_filter_multiple_identifiers(self):
# Let's create more activities:
self.second_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-170GLO4038'
)
self.second_other_identifier = OtherIdentifierFactory(
activity=self.second_activity,
identifier='BBB'
)
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': '%s,%s' % (
self.first_other_identifier.identifier,
self.second_other_identifier.identifier
)
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 2)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
self.assertEquals(
response.data['results'][1]['iati_identifier'],
self.second_activity.iati_identifier
)
|
<commit_before><commit_msg>Add tests for new 'other_identifier' filter<commit_after>
|
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from iati.factory.iati_factory import ActivityFactory, OtherIdentifierFactory
class ActivityFiltersTestCase(APITestCase):
def setUp(self):
self.first_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-201NEP1000'
)
self.first_other_identifier = OtherIdentifierFactory(
activity=self.first_activity,
identifier='AAA'
)
def test_other_identifier_filter_single_identifier(self):
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': self.first_other_identifier.identifier,
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 1)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
def test_other_identifier_filter_multiple_identifiers(self):
# Let's create more activities:
self.second_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-170GLO4038'
)
self.second_other_identifier = OtherIdentifierFactory(
activity=self.second_activity,
identifier='BBB'
)
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': '%s,%s' % (
self.first_other_identifier.identifier,
self.second_other_identifier.identifier
)
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 2)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
self.assertEquals(
response.data['results'][1]['iati_identifier'],
self.second_activity.iati_identifier
)
|
Add tests for new 'other_identifier' filterfrom django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from iati.factory.iati_factory import ActivityFactory, OtherIdentifierFactory
class ActivityFiltersTestCase(APITestCase):
def setUp(self):
self.first_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-201NEP1000'
)
self.first_other_identifier = OtherIdentifierFactory(
activity=self.first_activity,
identifier='AAA'
)
def test_other_identifier_filter_single_identifier(self):
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': self.first_other_identifier.identifier,
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 1)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
def test_other_identifier_filter_multiple_identifiers(self):
# Let's create more activities:
self.second_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-170GLO4038'
)
self.second_other_identifier = OtherIdentifierFactory(
activity=self.second_activity,
identifier='BBB'
)
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': '%s,%s' % (
self.first_other_identifier.identifier,
self.second_other_identifier.identifier
)
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 2)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
self.assertEquals(
response.data['results'][1]['iati_identifier'],
self.second_activity.iati_identifier
)
|
<commit_before><commit_msg>Add tests for new 'other_identifier' filter<commit_after>from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from iati.factory.iati_factory import ActivityFactory, OtherIdentifierFactory
class ActivityFiltersTestCase(APITestCase):
def setUp(self):
self.first_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-201NEP1000'
)
self.first_other_identifier = OtherIdentifierFactory(
activity=self.first_activity,
identifier='AAA'
)
def test_other_identifier_filter_single_identifier(self):
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': self.first_other_identifier.identifier,
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 1)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
def test_other_identifier_filter_multiple_identifiers(self):
# Let's create more activities:
self.second_activity = ActivityFactory(
iati_identifier='XM-DAC-41304-170GLO4038'
)
self.second_other_identifier = OtherIdentifierFactory(
activity=self.second_activity,
identifier='BBB'
)
url = reverse('activities:activity-list')
response = self.client.get(url)
response = self.client.get(
url, {
'other_identifier': '%s,%s' % (
self.first_other_identifier.identifier,
self.second_other_identifier.identifier
)
}, format='json'
)
self.assertTrue(status.is_success(response.status_code))
self.assertEquals(response.data['count'], 2)
self.assertEquals(
response.data['results'][0]['iati_identifier'],
self.first_activity.iati_identifier
)
self.assertEquals(
response.data['results'][1]['iati_identifier'],
self.second_activity.iati_identifier
)
|
|
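The filter under test is not included in this record. With django-filter it might be declared roughly as below; the otheridentifier related name is an assumption, and the comma splitting exercised by the second test comes from BaseInFilter:
import django_filters

class CharInFilter(django_filters.BaseInFilter, django_filters.CharFilter):
    """Accepts comma-separated values, as the multi-identifier test sends."""

class ActivityFilter(django_filters.FilterSet):
    other_identifier = CharInFilter(
        field_name='otheridentifier__identifier',
        lookup_expr='in',
        distinct=True)

# Usage sketch: ActivityFilter({'other_identifier': 'AAA,BBB'}, queryset=qs).qs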
e238b3b7ffea183f22d8171c8640e8a888fda791
|
zinnia/tests/test_context.py
|
zinnia/tests/test_context.py
|
"""Test cases for Zinnia Context"""
from django.test import TestCase
from django.template import Context
from django.core.paginator import Paginator
from zinnia.context import get_context_first_object
from zinnia.context import get_context_loop_position
class ContextTestCase(TestCase):
"""Tests cases for context"""
def test_get_context_first_object(self):
context = Context({'a': 1, 'b': 2, 'c': 3})
self.assertEqual(
get_context_first_object(context, ['key']),
None)
self.assertEqual(
get_context_first_object(context, ['a']),
1)
self.assertEqual(
get_context_first_object(context, ['b', 'a']),
2)
def test_get_context_loop_position(self):
paginator = Paginator(range(50), 10)
context = Context({})
self.assertEqual(
get_context_loop_position(context),
0)
context = Context({'forloop': {'counter': 5}})
self.assertEqual(
get_context_loop_position(context),
5)
context = Context({'forloop': {'counter': 5},
'page_obj': paginator.page(3)})
self.assertEqual(
get_context_loop_position(context),
25)
|
Add unit tests for zinnia.context
|
Add unit tests for zinnia.context
|
Python
|
bsd-3-clause
|
Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,Fantomas42/django-blog-zinnia,Zopieux/django-blog-zinnia,ghachey/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,Zopieux/django-blog-zinnia,Zopieux/django-blog-zinnia
|
Add unit tests for zinnia.context
|
"""Test cases for Zinnia Context"""
from django.test import TestCase
from django.template import Context
from django.core.paginator import Paginator
from zinnia.context import get_context_first_object
from zinnia.context import get_context_loop_position
class ContextTestCase(TestCase):
"""Tests cases for context"""
def test_get_context_first_object(self):
context = Context({'a': 1, 'b': 2, 'c': 3})
self.assertEqual(
get_context_first_object(context, ['key']),
None)
self.assertEqual(
get_context_first_object(context, ['a']),
1)
self.assertEqual(
get_context_first_object(context, ['b', 'a']),
2)
def test_get_context_loop_position(self):
paginator = Paginator(range(50), 10)
context = Context({})
self.assertEqual(
get_context_loop_position(context),
0)
context = Context({'forloop': {'counter': 5}})
self.assertEqual(
get_context_loop_position(context),
5)
context = Context({'forloop': {'counter': 5},
'page_obj': paginator.page(3)})
self.assertEqual(
get_context_loop_position(context),
25)
|
<commit_before><commit_msg>Add unit tests for zinnia.context<commit_after>
|
"""Test cases for Zinnia Context"""
from django.test import TestCase
from django.template import Context
from django.core.paginator import Paginator
from zinnia.context import get_context_first_object
from zinnia.context import get_context_loop_position
class ContextTestCase(TestCase):
"""Tests cases for context"""
def test_get_context_first_object(self):
context = Context({'a': 1, 'b': 2, 'c': 3})
self.assertEqual(
get_context_first_object(context, ['key']),
None)
self.assertEqual(
get_context_first_object(context, ['a']),
1)
self.assertEqual(
get_context_first_object(context, ['b', 'a']),
2)
def test_get_context_loop_position(self):
paginator = Paginator(range(50), 10)
context = Context({})
self.assertEqual(
get_context_loop_position(context),
0)
context = Context({'forloop': {'counter': 5}})
self.assertEqual(
get_context_loop_position(context),
5)
context = Context({'forloop': {'counter': 5},
'page_obj': paginator.page(3)})
self.assertEqual(
get_context_loop_position(context),
25)
|
Add unit tests for zinnia.context"""Test cases for Zinnia Context"""
from django.test import TestCase
from django.template import Context
from django.core.paginator import Paginator
from zinnia.context import get_context_first_object
from zinnia.context import get_context_loop_position
class ContextTestCase(TestCase):
"""Tests cases for context"""
def test_get_context_first_object(self):
context = Context({'a': 1, 'b': 2, 'c': 3})
self.assertEqual(
get_context_first_object(context, ['key']),
None)
self.assertEqual(
get_context_first_object(context, ['a']),
1)
self.assertEqual(
get_context_first_object(context, ['b', 'a']),
2)
def test_get_context_loop_position(self):
paginator = Paginator(range(50), 10)
context = Context({})
self.assertEqual(
get_context_loop_position(context),
0)
context = Context({'forloop': {'counter': 5}})
self.assertEqual(
get_context_loop_position(context),
5)
context = Context({'forloop': {'counter': 5},
'page_obj': paginator.page(3)})
self.assertEqual(
get_context_loop_position(context),
25)
|
<commit_before><commit_msg>Add unit tests for zinnia.context<commit_after>"""Test cases for Zinnia Context"""
from django.test import TestCase
from django.template import Context
from django.core.paginator import Paginator
from zinnia.context import get_context_first_object
from zinnia.context import get_context_loop_position
class ContextTestCase(TestCase):
"""Tests cases for context"""
def test_get_context_first_object(self):
context = Context({'a': 1, 'b': 2, 'c': 3})
self.assertEqual(
get_context_first_object(context, ['key']),
None)
self.assertEqual(
get_context_first_object(context, ['a']),
1)
self.assertEqual(
get_context_first_object(context, ['b', 'a']),
2)
def test_get_context_loop_position(self):
paginator = Paginator(range(50), 10)
context = Context({})
self.assertEqual(
get_context_loop_position(context),
0)
context = Context({'forloop': {'counter': 5}})
self.assertEqual(
get_context_loop_position(context),
5)
context = Context({'forloop': {'counter': 5},
'page_obj': paginator.page(3)})
self.assertEqual(
get_context_loop_position(context),
25)
|
|
fb4fc80c5e95308f363d96d02193300abbeb22c9
|
hotline/db/db_mongo.py
|
hotline/db/db_mongo.py
|
import os
from pymongo import MongoClient
from urllib.parse import urlparse
mongo_url = os.environ.get('MONGODB_URL', 'mongodb://localhost:27017/')
# mongo_url_parse = urlparse(mongo_url)
mongo_client = MongoClient(mongo_url)
mongo_db = mongo_client['hotline']
|
Define layer for Mongo client
|
Define layer for Mongo client
|
Python
|
mit
|
wearhacks/hackathon_hotline
|
Define layer for Mongo client
|
import os
from pymongo import MongoClient
from urllib.parse import urlparse
mongo_url = os.environ.get('MONGODB_URL', 'mongodb://localhost:27017/')
# mongo_url_parse = urlparse(mongo_url)
mongo_client = MongoClient(mongo_url)
mongo_db = mongo_client['hotline']
|
<commit_before><commit_msg>Define layer for Mongo client<commit_after>
|
import os
from pymongo import MongoClient
from urllib.parse import urlparse
mongo_url = os.environ.get('MONGODB_URL', 'mongodb://localhost:27017/')
# mongo_url_parse = urlparse(mongo_url)
mongo_client = MongoClient(mongo_url)
mongo_db = mongo_client['hotline']
|
Define layer for Mongo clientimport os
from pymongo import MongoClient
from urllib.parse import urlparse
mongo_url = os.environ.get('MONGODB_URL', 'mongodb://localhost:27017/')
# mongo_url_parse = urlparse(mongo_url)
mongo_client = MongoClient(mongo_url)
mongo_db = mongo_client['hotline']
|
<commit_before><commit_msg>Define layer for Mongo client<commit_after>import os
from pymongo import MongoClient
from urllib.parse import urlparse
mongo_url = os.environ.get('MONGODB_URL', 'mongodb://localhost:27017/')
# mongo_url_parse = urlparse(mongo_url)
mongo_client = MongoClient(mongo_url)
mongo_db = mongo_client['hotline']
|
|
1c18e61ece7f05ac5b1afd276c72a6a242e9fb66
|
braid/info.py
|
braid/info.py
|
from fabric.api import run, quiet
from braid import succeeds, cacheInEnvironment
@cacheInEnvironment
def distroName():
"""
Get the name of the distro.
"""
with quiet():
lsb = run('lsb_release --id --short', warn_only=True)
if lsb.succeeded:
return lsb.lower()
distros = [
('centos', '/etc/centos-release'),
('fedora', '/etc/fedora-release'),
]
for distro, sentinel in distros:
if succeeds('test -f {}'.format(sentinel)):
return distro
def distroFamily():
"""
Get the family of the distro.
@returns: C{'debian'} or C{'fedora'}
"""
families = {
'debian': ['debian', 'ubuntu'],
'fedora': ['fedora', 'centos', 'rhel'],
}
distro = distroName()
for family, members in families.iteritems():
if distro in members:
return family
return 'other'
|
Add some tools for detecting the remote distribution.
|
Add some tools for detecting the remote distribution.
|
Python
|
mit
|
alex/braid,alex/braid
|
Add some tools for detecting the remote distribution.
|
from fabric.api import run, quiet
from braid import succeeds, cacheInEnvironment
@cacheInEnvironment
def distroName():
"""
Get the name of the distro.
"""
with quiet():
lsb = run('lsb_release --id --short', warn_only=True)
if lsb.succeeded:
return lsb.lower()
distros = [
('centos', '/etc/centos-release'),
('fedora', '/etc/fedora-release'),
]
for distro, sentinel in distros:
if succeeds('test -f {}'.format(sentinel)):
return distro
def distroFamily():
"""
Get the family of the distro.
@returns: C{'debian'} or C{'fedora'}
"""
families = {
'debian': ['debian', 'ubuntu'],
'fedora': ['fedora', 'centos', 'rhel'],
}
distro = distroName()
for family, members in families.iteritems():
if distro in members:
return family
return 'other'
|
<commit_before><commit_msg>Add some tools for detecting the remote distribution.<commit_after>
|
from fabric.api import run, quiet
from braid import succeeds, cacheInEnvironment
@cacheInEnvironment
def distroName():
"""
Get the name of the distro.
"""
with quiet():
lsb = run('lsb_release --id --short', warn_only=True)
if lsb.succeeded:
return lsb.lower()
distros = [
('centos', '/etc/centos-release'),
('fedora', '/etc/fedora-release'),
]
for distro, sentinel in distros:
if succeeds('test -f {}'.format(sentinel)):
return distro
def distroFamily():
"""
Get the family of the distro.
@returns: C{'debian'} or C{'fedora'}
"""
families = {
'debian': ['debian', 'ubuntu'],
'fedora': ['fedora', 'centos', 'rhel'],
}
distro = distroName()
for family, members in families.iteritems():
if distro in members:
return family
return 'other'
|
Add some tools for detecting the remote distribution.from fabric.api import run, quiet
from braid import succeeds, cacheInEnvironment
@cacheInEnvironment
def distroName():
"""
Get the name of the distro.
"""
with quiet():
lsb = run('lsb_release --id --short', warn_only=True)
if lsb.succeeded:
return lsb.lower()
distros = [
('centos', '/etc/centos-release'),
('fedora', '/etc/fedora-release'),
]
for distro, sentinel in distros:
if succeeds('test -f {}'.format(sentinel)):
return distro
def distroFamily():
"""
Get the family of the distro.
@returns: C{'debian'} or C{'fedora'}
"""
families = {
'debian': ['debian', 'ubuntu'],
'fedora': ['fedora', 'centos', 'rhel'],
}
distro = distroName()
for family, members in families.iteritems():
if distro in members:
return family
return 'other'
|
<commit_before><commit_msg>Add some tools for detecting the remote distribution.<commit_after>from fabric.api import run, quiet
from braid import succeeds, cacheInEnvironment
@cacheInEnvironment
def distroName():
"""
Get the name of the distro.
"""
with quiet():
lsb = run('lsb_release --id --short', warn_only=True)
if lsb.succeeded:
return lsb.lower()
distros = [
('centos', '/etc/centos-release'),
('fedora', '/etc/fedora-release'),
]
for distro, sentinel in distros:
if succeeds('test -f {}'.format(sentinel)):
return distro
def distroFamily():
"""
Get the family of the distro.
@returns: C{'debian'} or C{'fedora'}
"""
families = {
'debian': ['debian', 'ubuntu'],
'fedora': ['fedora', 'centos', 'rhel'],
}
distro = distroName()
for family, members in families.iteritems():
if distro in members:
return family
return 'other'
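A hedged usage sketch built on the helpers above; the fabric task and the package-manager command strings are illustrative, not part of the commit.
from fabric.api import run, task
from braid.info import distroFamily
@task
def installGit():
    # Pick the package manager from the detected distro family.
    if distroFamily() == 'debian':
        run('apt-get -y install git')
    else:
        run('yum -y install git')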
|
|
16ab1054d77d0f50762122cb747ba0d819f3ec2c
|
numba/cuda/tests/cudapy/test_lineinfo.py
|
numba/cuda/tests/cudapy/test_lineinfo.py
|
from numba.cuda.testing import skip_on_cudasim
from numba import cuda
from numba.core import types
from numba.cuda.testing import CUDATestCase
import re
import unittest
@skip_on_cudasim('Simulator does not produce lineinfo')
class TestCudaLineInfo(CUDATestCase):
"""
These tests only check the compiled PTX for line mappings
"""
def _getasm(self, fn, sig):
fn.compile(sig)
return fn.inspect_asm(sig)
def _check(self, fn, sig, expect):
asm = self._getasm(fn, sig=sig)
# The name of this file should be present in the line mapping
# if lineinfo was propagated through correctly.
re_section_lineinfo = re.compile(r"test_lineinfo.py")
match = re_section_lineinfo.search(asm)
assertfn = self.assertIsNotNone if expect else self.assertIsNone
assertfn(match, msg=asm)
def test_no_lineinfo_in_asm(self):
@cuda.jit(lineinfo=False)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=False)
def test_lineinfo_in_asm(self):
@cuda.jit(lineinfo=True)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=True)
if __name__ == '__main__':
unittest.main()
|
Add tests for lineinfo in cuda.jit
|
Add tests for lineinfo in cuda.jit
|
Python
|
bsd-2-clause
|
stonebig/numba,cpcloud/numba,stuartarchibald/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,stonebig/numba,IntelLabs/numba,stonebig/numba,numba/numba,IntelLabs/numba,stonebig/numba,seibert/numba,IntelLabs/numba,numba/numba,stuartarchibald/numba,seibert/numba,cpcloud/numba,seibert/numba,cpcloud/numba,seibert/numba,cpcloud/numba,IntelLabs/numba,IntelLabs/numba,numba/numba,numba/numba,stuartarchibald/numba,seibert/numba,stuartarchibald/numba,stonebig/numba
|
Add tests for lineinfo in cuda.jit
|
from numba.cuda.testing import skip_on_cudasim
from numba import cuda
from numba.core import types
from numba.cuda.testing import CUDATestCase
import re
import unittest
@skip_on_cudasim('Simulator does not produce lineinfo')
class TestCudaLineInfo(CUDATestCase):
"""
These tests only check the compiled PTX for line mappings
"""
def _getasm(self, fn, sig):
fn.compile(sig)
return fn.inspect_asm(sig)
def _check(self, fn, sig, expect):
asm = self._getasm(fn, sig=sig)
# The name of this file should be present in the line mapping
# if lineinfo was propagated through correctly.
re_section_lineinfo = re.compile(r"test_lineinfo.py")
match = re_section_lineinfo.search(asm)
assertfn = self.assertIsNotNone if expect else self.assertIsNone
assertfn(match, msg=asm)
def test_no_lineinfo_in_asm(self):
@cuda.jit(lineinfo=False)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=False)
def test_lineinfo_in_asm(self):
@cuda.jit(lineinfo=True)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=True)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for lineinfo in cuda.jit<commit_after>
|
from numba.cuda.testing import skip_on_cudasim
from numba import cuda
from numba.core import types
from numba.cuda.testing import CUDATestCase
import re
import unittest
@skip_on_cudasim('Simulator does not produce lineinfo')
class TestCudaLineInfo(CUDATestCase):
"""
These tests only check the compiled PTX for line mappings
"""
def _getasm(self, fn, sig):
fn.compile(sig)
return fn.inspect_asm(sig)
def _check(self, fn, sig, expect):
asm = self._getasm(fn, sig=sig)
# The name of this file should be present in the line mapping
# if lineinfo was propagated through correctly.
re_section_lineinfo = re.compile(r"test_lineinfo.py")
match = re_section_lineinfo.search(asm)
assertfn = self.assertIsNotNone if expect else self.assertIsNone
assertfn(match, msg=asm)
def test_no_lineinfo_in_asm(self):
@cuda.jit(lineinfo=False)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=False)
def test_lineinfo_in_asm(self):
@cuda.jit(lineinfo=True)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=True)
if __name__ == '__main__':
unittest.main()
|
Add tests for lineinfo in cuda.jitfrom numba.cuda.testing import skip_on_cudasim
from numba import cuda
from numba.core import types
from numba.cuda.testing import CUDATestCase
import re
import unittest
@skip_on_cudasim('Simulator does not produce lineinfo')
class TestCudaLineInfo(CUDATestCase):
"""
These tests only check the compiled PTX for line mappings
"""
def _getasm(self, fn, sig):
fn.compile(sig)
return fn.inspect_asm(sig)
def _check(self, fn, sig, expect):
asm = self._getasm(fn, sig=sig)
# The name of this file should be present in the line mapping
# if lineinfo was propagated through correctly.
re_section_lineinfo = re.compile(r"test_lineinfo.py")
match = re_section_lineinfo.search(asm)
assertfn = self.assertIsNotNone if expect else self.assertIsNone
assertfn(match, msg=asm)
def test_no_lineinfo_in_asm(self):
@cuda.jit(lineinfo=False)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=False)
def test_lineinfo_in_asm(self):
@cuda.jit(lineinfo=True)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=True)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for lineinfo in cuda.jit<commit_after>from numba.cuda.testing import skip_on_cudasim
from numba import cuda
from numba.core import types
from numba.cuda.testing import CUDATestCase
import re
import unittest
@skip_on_cudasim('Simulator does not produce lineinfo')
class TestCudaLineInfo(CUDATestCase):
"""
These tests only check the compiled PTX for line mappings
"""
def _getasm(self, fn, sig):
fn.compile(sig)
return fn.inspect_asm(sig)
def _check(self, fn, sig, expect):
asm = self._getasm(fn, sig=sig)
# The name of this file should be present in the line mapping
# if lineinfo was propagated through correctly.
re_section_lineinfo = re.compile(r"test_lineinfo.py")
match = re_section_lineinfo.search(asm)
assertfn = self.assertIsNotNone if expect else self.assertIsNone
assertfn(match, msg=asm)
def test_no_lineinfo_in_asm(self):
@cuda.jit(lineinfo=False)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=False)
def test_lineinfo_in_asm(self):
@cuda.jit(lineinfo=True)
def foo(x):
x[0] = 1
self._check(foo, sig=(types.int32[:],), expect=True)
if __name__ == '__main__':
unittest.main()
|
|
acebd3a5e91788c95cbd22745620d6e05e8bf3a3
|
zerver/migrations/0126_disallow_chars_in_stream_and_user_name.py
|
zerver/migrations/0126_disallow_chars_in_stream_and_user_name.py
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from typing import Text
def remove_special_chars_from_streamname(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Stream = apps.get_model('zerver', 'Stream')
NAME_INVALID_CHARS = ['*', '@', '`', '#']
for stream in Stream.objects.all():
if (set(stream.name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
stream.name = stream.name.replace(char, ' ').strip()
while Stream.objects.filter(name__iexact=stream.name, realm=stream.realm).exists():
stream.name = stream.name + '^'
if len(stream.name) > 60:
# extremely unlikely, so just do something valid
stream.name = stream.name[-60:]
stream.save(update_fields=['name'])
def remove_special_chars_from_username(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
UserProfile = apps.get_model('zerver', 'UserProfile')
NAME_INVALID_CHARS = ['*', '`', '>', '"', '@', '#']
for userprofile in UserProfile.objects.all():
if (set(userprofile.full_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.full_name = userprofile.full_name.replace(char, ' ').strip()
userprofile.save(update_fields=['full_name'])
if (set(userprofile.short_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.short_name = userprofile.short_name.replace(char, ' ').strip()
userprofile.save(update_fields=['short_name'])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0125_realm_max_invites'),
]
operations = [
migrations.RunPython(remove_special_chars_from_streamname),
migrations.RunPython(remove_special_chars_from_username),
]
|
Replace special chars in stream & user names with space.
|
migrations: Replace special chars in stream & user names with space.
Also handle potential duplicate stream names created.
Fixes #6534
|
Python
|
apache-2.0
|
punchagan/zulip,showell/zulip,shubhamdhama/zulip,jackrzhang/zulip,jackrzhang/zulip,rishig/zulip,brainwane/zulip,rishig/zulip,hackerkid/zulip,rht/zulip,zulip/zulip,tommyip/zulip,rishig/zulip,timabbott/zulip,showell/zulip,kou/zulip,rishig/zulip,brainwane/zulip,hackerkid/zulip,kou/zulip,jackrzhang/zulip,rishig/zulip,kou/zulip,eeshangarg/zulip,rht/zulip,timabbott/zulip,hackerkid/zulip,zulip/zulip,hackerkid/zulip,dhcrzf/zulip,brainwane/zulip,mahim97/zulip,zulip/zulip,kou/zulip,dhcrzf/zulip,timabbott/zulip,synicalsyntax/zulip,punchagan/zulip,dhcrzf/zulip,rht/zulip,mahim97/zulip,eeshangarg/zulip,synicalsyntax/zulip,timabbott/zulip,rht/zulip,brainwane/zulip,eeshangarg/zulip,rishig/zulip,shubhamdhama/zulip,zulip/zulip,hackerkid/zulip,brainwane/zulip,showell/zulip,showell/zulip,synicalsyntax/zulip,synicalsyntax/zulip,andersk/zulip,jackrzhang/zulip,eeshangarg/zulip,showell/zulip,tommyip/zulip,andersk/zulip,andersk/zulip,zulip/zulip,mahim97/zulip,rht/zulip,andersk/zulip,zulip/zulip,tommyip/zulip,andersk/zulip,synicalsyntax/zulip,timabbott/zulip,shubhamdhama/zulip,kou/zulip,tommyip/zulip,timabbott/zulip,showell/zulip,synicalsyntax/zulip,shubhamdhama/zulip,mahim97/zulip,andersk/zulip,mahim97/zulip,punchagan/zulip,shubhamdhama/zulip,eeshangarg/zulip,shubhamdhama/zulip,synicalsyntax/zulip,punchagan/zulip,punchagan/zulip,showell/zulip,eeshangarg/zulip,tommyip/zulip,punchagan/zulip,andersk/zulip,hackerkid/zulip,zulip/zulip,dhcrzf/zulip,hackerkid/zulip,kou/zulip,tommyip/zulip,dhcrzf/zulip,tommyip/zulip,brainwane/zulip,brainwane/zulip,punchagan/zulip,eeshangarg/zulip,shubhamdhama/zulip,rishig/zulip,jackrzhang/zulip,jackrzhang/zulip,jackrzhang/zulip,dhcrzf/zulip,dhcrzf/zulip,rht/zulip,rht/zulip,timabbott/zulip,kou/zulip,mahim97/zulip
|
migrations: Replace special chars in stream & user names with space.
Also handle potential duplicate stream names created.
Fixes #6534
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from typing import Text
def remove_special_chars_from_streamname(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Stream = apps.get_model('zerver', 'Stream')
NAME_INVALID_CHARS = ['*', '@', '`', '#']
for stream in Stream.objects.all():
if (set(stream.name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
stream.name = stream.name.replace(char, ' ').strip()
while Stream.objects.filter(name__iexact=stream.name, realm=stream.realm).exists():
stream.name = stream.name + '^'
if len(stream.name) > 60:
# extremely unlikely, so just do something valid
stream.name = stream.name[-60:]
stream.save(update_fields=['name'])
def remove_special_chars_from_username(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
UserProfile = apps.get_model('zerver', 'UserProfile')
NAME_INVALID_CHARS = ['*', '`', '>', '"', '@', '#']
for userprofile in UserProfile.objects.all():
if (set(userprofile.full_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.full_name = userprofile.full_name.replace(char, ' ').strip()
userprofile.save(update_fields=['full_name'])
if (set(userprofile.short_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.short_name = userprofile.short_name.replace(char, ' ').strip()
userprofile.save(update_fields=['short_name'])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0125_realm_max_invites'),
]
operations = [
migrations.RunPython(remove_special_chars_from_streamname),
migrations.RunPython(remove_special_chars_from_username),
]
|
<commit_before><commit_msg>migrations: Replace special chars in stream & user names with space.
Also handle potential duplicate stream names created.
Fixes #6534<commit_after>
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from typing import Text
def remove_special_chars_from_streamname(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Stream = apps.get_model('zerver', 'Stream')
NAME_INVALID_CHARS = ['*', '@', '`', '#']
for stream in Stream.objects.all():
if (set(stream.name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
stream.name = stream.name.replace(char, ' ').strip()
while Stream.objects.filter(name__iexact=stream.name, realm=stream.realm).exists():
stream.name = stream.name + '^'
if len(stream.name) > 60:
# extremely unlikely, so just do something valid
stream.name = stream.name[-60:]
stream.save(update_fields=['name'])
def remove_special_chars_from_username(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
UserProfile = apps.get_model('zerver', 'UserProfile')
NAME_INVALID_CHARS = ['*', '`', '>', '"', '@', '#']
for userprofile in UserProfile.objects.all():
if (set(userprofile.full_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.full_name = userprofile.full_name.replace(char, ' ').strip()
userprofile.save(update_fields=['full_name'])
if (set(userprofile.short_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.short_name = userprofile.short_name.replace(char, ' ').strip()
userprofile.save(update_fields=['short_name'])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0125_realm_max_invites'),
]
operations = [
migrations.RunPython(remove_special_chars_from_streamname),
migrations.RunPython(remove_special_chars_from_username),
]
|
migrations: Replace special chars in stream & user names with space.
Also handle potential duplicate stream names created.
Fixes #6534# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from typing import Text
def remove_special_chars_from_streamname(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Stream = apps.get_model('zerver', 'Stream')
NAME_INVALID_CHARS = ['*', '@', '`', '#']
for stream in Stream.objects.all():
if (set(stream.name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
stream.name = stream.name.replace(char, ' ').strip()
while Stream.objects.filter(name__iexact=stream.name, realm=stream.realm).exists():
stream.name = stream.name + '^'
if len(stream.name) > 60:
# extremely unlikely, so just do something valid
stream.name = stream.name[-60:]
stream.save(update_fields=['name'])
def remove_special_chars_from_username(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
UserProfile = apps.get_model('zerver', 'UserProfile')
NAME_INVALID_CHARS = ['*', '`', '>', '"', '@', '#']
for userprofile in UserProfile.objects.all():
if (set(userprofile.full_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.full_name = userprofile.full_name.replace(char, ' ').strip()
userprofile.save(update_fields=['full_name'])
if (set(userprofile.short_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.short_name = userprofile.short_name.replace(char, ' ').strip()
userprofile.save(update_fields=['short_name'])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0125_realm_max_invites'),
]
operations = [
migrations.RunPython(remove_special_chars_from_streamname),
migrations.RunPython(remove_special_chars_from_username),
]
|
<commit_before><commit_msg>migrations: Replace special chars in stream & user names with space.
Also handle potential duplicate stream names created.
Fixes #6534<commit_after># -*- coding: utf-8 -*-
from django.db import models, migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from typing import Text
def remove_special_chars_from_streamname(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Stream = apps.get_model('zerver', 'Stream')
NAME_INVALID_CHARS = ['*', '@', '`', '#']
for stream in Stream.objects.all():
if (set(stream.name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
stream.name = stream.name.replace(char, ' ').strip()
while Stream.objects.filter(name__iexact=stream.name, realm=stream.realm).exists():
stream.name = stream.name + '^'
if len(stream.name) > 60:
# extremely unlikely, so just do something valid
stream.name = stream.name[-60:]
stream.save(update_fields=['name'])
def remove_special_chars_from_username(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
UserProfile = apps.get_model('zerver', 'UserProfile')
NAME_INVALID_CHARS = ['*', '`', '>', '"', '@', '#']
for userprofile in UserProfile.objects.all():
if (set(userprofile.full_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.full_name = userprofile.full_name.replace(char, ' ').strip()
userprofile.save(update_fields=['full_name'])
if (set(userprofile.short_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.short_name = userprofile.short_name.replace(char, ' ').strip()
userprofile.save(update_fields=['short_name'])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0125_realm_max_invites'),
]
operations = [
migrations.RunPython(remove_special_chars_from_streamname),
migrations.RunPython(remove_special_chars_from_username),
]
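A standalone sketch of the renaming rule the migration applies, restated over a plain set of other streams' names instead of the ORM; the function and variable names are illustrative.
def sanitize_stream_name(name, existing_names):
    # Strip the disallowed characters, mirroring the migration.
    for char in ['*', '@', '`', '#']:
        name = name.replace(char, ' ').strip()
    # Append '^' until the name no longer collides (case-insensitive).
    lowered = {n.lower() for n in existing_names}
    while name.lower() in lowered:
        name = name + '^'
    # Keep the result within the 60-character stream name limit.
    if len(name) > 60:
        name = name[-60:]
    return name
print(sanitize_stream_name('dev#ops', {'dev ops'}))  # -> 'dev ops^'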
|
|
abc46027b8af2d21debe8a23968b964ab6cb5c6b
|
CSP-Browser/CSPBrowser.py
|
CSP-Browser/CSPBrowser.py
|
#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
def shutdown(self):
self.driver.close()
|
#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
self.shutdown()
def shutdown(self):
self.driver.close()
|
Make Selenium shut down after it's done
|
Make Selenium shut down after it's done
|
Python
|
mit
|
reedox/CSPTools,Kennysan/CSPTools
|
#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
def shutdown(self):
self.driver.close()Make Selenium shut down after it's done
|
#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
self.shutdown()
def shutdown(self):
self.driver.close()
|
<commit_before>#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
def shutdown(self):
self.driver.close()<commit_msg>Make Selenium shut down after it's done<commit_after>
|
#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
self.shutdown()
def shutdown(self):
self.driver.close()
|
#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
def shutdown(self):
self.driver.close()Make Selenium shut down after it's done#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
self.shutdown()
def shutdown(self):
self.driver.close()
|
<commit_before>#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
def shutdown(self):
self.driver.close()<commit_msg>Make Selenium shut down after it's done<commit_after>#!/usr/bin/python
from pyvirtualdisplay import Display
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
import re
import atexit
disp = Display(visible=0, size=(800,600))
atexit.register(lambda: disp.stop())
disp.start()
class CSPBrowser:
schema = re.compile('^https?://', re.IGNORECASE)
def __init__(self, port=None, domain=None):
profile = webdriver.FirefoxProfile()
if port != None and domain != None:
profile.set_preference("network.proxy.type",1)
profile.set_preference("network.proxy.http", domain)
profile.set_preference("network.proxy.http_port", port)
profile.update_preferences()
self.driver = webdriver.Firefox(firefox_profile=profile)
def load(self, urllist):
self.urllist = []
for url in urllist:
if not self.schema.match(url):
url = 'http://' + url
self.urllist.append(url)
def run(self):
for url in self.urllist:
print url
if (not len(url)): continue
#print "Visiting: " + url
self.driver.get(url)
self.driver.get('about:blank')
self.shutdown()
def shutdown(self):
self.driver.close()
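A hypothetical driver script for the class above; the import, proxy address, and URL list are placeholders.
from CSPBrowser import CSPBrowser  # assumes the script's directory is on the path
browser = CSPBrowser(port=8080, domain='127.0.0.1')
browser.load(['example.com', 'https://example.org'])
# run() visits each URL and, after this commit, also shuts the driver down.
browser.run()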
|
e963ba18ec0b2d403a8a99d6374f42245a30e20d
|
spicedham/basewrapper.py
|
spicedham/basewrapper.py
|
class BaseWrapper(object):
def set_up():
pass
def get_key(key, default=None):
raise NotImplementedError()
def get_key_list(keys, default=None):
return [ get_key(key, default) for key in keys ]
def set_key_list(key_value_tuples):
return [ set_key(key, value) for key, value in key_value_tuples ]
def set_key(key, value):
raise NotImplementedError()
|
Add a base class for the backends
|
Add a base class for the backends
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
Add a base class for the backends
|
class BaseWrapper(object):
def set_up():
pass
def get_key(key, default=None):
raise NotImplementedError()
def get_key_list(keys, default=None):
return [ get_key(key, default) for key in keys ]
def set_key_list(key_value_tuples):
return [ set_key(key, value) for key, value in key_value_tuples ]
def set_key(key, value):
raise NotImplementedError()
|
<commit_before><commit_msg>Add a base class for the backends<commit_after>
|
class BaseWrapper(object):
def set_up():
pass
def get_key(key, default=None):
raise NotImplementedError()
def get_key_list(keys, default=None):
return [ get_key(key, default) for key in keys ]
def set_key_list(key_value_tuples):
return [ set_key(key, value) for key, value in key_value_tuples ]
def set_key(key, value):
raise NotImplementedError()
|
Add a base class for the backends
class BaseWrapper(object):
def set_up():
pass
def get_key(key, default=None):
raise NotImplementedError()
def get_key_list(keys, default=None):
return [ get_key(key, default) for key in keys ]
def set_key_list(key_value_tuples):
return [ set_key(key, value) for key, value in key_value_tuples ]
def set_key(key, value):
raise NotImplementedError()
|
<commit_before><commit_msg>Add a base class for the backends<commit_after>
class BaseWrapper(object):
def set_up():
pass
def get_key(key, default=None):
raise NotImplementedError()
def get_key_list(keys, default=None):
return [ get_key(key, default) for key in keys ]
def set_key_list(key_value_tuples):
return [ set_key(key, value) for key, value in key_value_tuples ]
def set_key(key, value):
raise NotImplementedError()
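A minimal in-memory backend following the interface above; note the committed base class omits `self` on its methods, so this sketch assumes conventional instance methods. The class name and storage strategy are hypothetical.
class DictWrapper(object):
    def set_up(self):
        self._store = {}
    def get_key(self, key, default=None):
        return self._store.get(key, default)
    def get_key_list(self, keys, default=None):
        return [self.get_key(key, default) for key in keys]
    def set_key(self, key, value):
        self._store[key] = value
    def set_key_list(self, key_value_tuples):
        return [self.set_key(key, value) for key, value in key_value_tuples]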
|
|
13c490a10c8eb7b84583c2713843969bcdc035e1
|
bluebottle/members/migrations/0029_merge_20190222_0930.py
|
bluebottle/members/migrations/0029_merge_20190222_0930.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-22 08:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('members', '0028_auto_20190219_1024'),
('members', '0028_merge_20190215_1441'),
]
operations = [
]
|
Make migration file for members database
|
Make migration file for members database
- Migrate field last_logout to member & field scim_external_id to member
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Make migration file for members database
- Migrate field last_logout to member & field scim_external_id to member
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-22 08:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('members', '0028_auto_20190219_1024'),
('members', '0028_merge_20190215_1441'),
]
operations = [
]
|
<commit_before><commit_msg>Make migration file for members database
- Migrate field last_logout to member & field scim_external_id to member<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-22 08:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('members', '0028_auto_20190219_1024'),
('members', '0028_merge_20190215_1441'),
]
operations = [
]
|
Make migration file for members database
- Migrate field last_logout to member & field scim_external_id to member# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-22 08:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('members', '0028_auto_20190219_1024'),
('members', '0028_merge_20190215_1441'),
]
operations = [
]
|
<commit_before><commit_msg>Make migration file for members database
- Migrate field last_logout to member & field scim_external_id to member<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-02-22 08:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('members', '0028_auto_20190219_1024'),
('members', '0028_merge_20190215_1441'),
]
operations = [
]
|
|
f2c79668789dd2c9f7dcfe2bf12bb34d30516962
|
bluebottle/projects/migrations/0084_auto_20181207_1435.py
|
bluebottle/projects/migrations/0084_auto_20181207_1435.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-07 13:35
from __future__ import unicode_literals
import bluebottle.utils.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0083_auto_20181129_1506'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='bank_details_reviewed',
),
migrations.AlterField(
model_name='projectcreatetemplate',
name='default_amount_asked',
field=bluebottle.utils.fields.MoneyField(blank=True, currency_choices="[('EUR', u'Euro')]", decimal_places=2, default=None, max_digits=12, null=True),
),
]
|
Add migration that removes bank_details_reviewed flag on project
|
Add migration that removes bank_details_reviewed flag on project
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add migration that removes bank_details_reviewed flag on project
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-07 13:35
from __future__ import unicode_literals
import bluebottle.utils.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0083_auto_20181129_1506'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='bank_details_reviewed',
),
migrations.AlterField(
model_name='projectcreatetemplate',
name='default_amount_asked',
field=bluebottle.utils.fields.MoneyField(blank=True, currency_choices="[('EUR', u'Euro')]", decimal_places=2, default=None, max_digits=12, null=True),
),
]
|
<commit_before><commit_msg>Add migration that removes bank_details_reviewed flag on project<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-07 13:35
from __future__ import unicode_literals
import bluebottle.utils.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0083_auto_20181129_1506'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='bank_details_reviewed',
),
migrations.AlterField(
model_name='projectcreatetemplate',
name='default_amount_asked',
field=bluebottle.utils.fields.MoneyField(blank=True, currency_choices="[('EUR', u'Euro')]", decimal_places=2, default=None, max_digits=12, null=True),
),
]
|
Add migration that removes bank_details_reviewed flag on project# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-07 13:35
from __future__ import unicode_literals
import bluebottle.utils.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0083_auto_20181129_1506'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='bank_details_reviewed',
),
migrations.AlterField(
model_name='projectcreatetemplate',
name='default_amount_asked',
field=bluebottle.utils.fields.MoneyField(blank=True, currency_choices="[('EUR', u'Euro')]", decimal_places=2, default=None, max_digits=12, null=True),
),
]
|
<commit_before><commit_msg>Add migration that removes bank_details_reviewed flag on project<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-07 13:35
from __future__ import unicode_literals
import bluebottle.utils.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0083_auto_20181129_1506'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='bank_details_reviewed',
),
migrations.AlterField(
model_name='projectcreatetemplate',
name='default_amount_asked',
field=bluebottle.utils.fields.MoneyField(blank=True, currency_choices="[('EUR', u'Euro')]", decimal_places=2, default=None, max_digits=12, null=True),
),
]
|
|
8ebba2af62f7b917427fa1233ad81314f4e47102
|
shade/tests/unit/test_operator_noauth.py
|
shade/tests/unit/test_operator_noauth.py
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import ironicclient
import shade
from shade.tests import base
class TestShadeOperatorNoAuth(base.TestCase):
def setUp(self):
"""Setup Noauth OperatorCloud tests
Setup the test to utilize no authentication and an endpoint
        URL in the auth data. This permits testing of the basic
mechanism that enables Ironic noauth mode to be utilized with
Shade.
"""
super(TestShadeOperatorNoAuth, self).setUp()
self.cloud_noauth = shade.operator_cloud(
auth_type='None',
auth=dict(endpoint="http://localhost:6385")
)
@mock.patch.object(shade.OperatorCloud, 'get_endpoint')
@mock.patch.object(ironicclient.client, 'Client')
def test_ironic_noauth_selection_using_a_task(
self, mock_client, mock_endpoint):
"""Test noauth selection for Ironic in OperatorCloud
Utilize a task to trigger the client connection attempt
and evaluate if get_endpoint was called while the client
was still called.
"""
self.cloud_noauth.patch_machine('name', {})
self.assertFalse(mock_endpoint.called)
self.assertTrue(mock_client.called)
|
Add test of OperatorCloud auth_type=None
|
Add test of OperatorCloud auth_type=None
Ironic features a noauth mode which is intended for use in isolated
trusted environments. As this is not a normal use case for shade,
it is an incredibly important item to have a test for.
Change-Id: If86b9df238982d912105fb08dcd59c9c85b7de4a
|
Python
|
apache-2.0
|
stackforge/python-openstacksdk,dtroyer/python-openstacksdk,stackforge/python-openstacksdk,openstack-infra/shade,dtroyer/python-openstacksdk,openstack-infra/shade,openstack/python-openstacksdk,jsmartin/shade,jsmartin/shade,openstack/python-openstacksdk
|
Add test of OperatorCloud auth_type=None
Ironic features a noauth mode which is intended for use in isolated
trusted environments. As this is not a normal use case for shade,
it is an incredibly important item to have a test for.
Change-Id: If86b9df238982d912105fb08dcd59c9c85b7de4a
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import ironicclient
import shade
from shade.tests import base
class TestShadeOperatorNoAuth(base.TestCase):
def setUp(self):
"""Setup Noauth OperatorCloud tests
Setup the test to utilize no authentication and an endpoint
        URL in the auth data. This permits testing of the basic
mechanism that enables Ironic noauth mode to be utilized with
Shade.
"""
super(TestShadeOperatorNoAuth, self).setUp()
self.cloud_noauth = shade.operator_cloud(
auth_type='None',
auth=dict(endpoint="http://localhost:6385")
)
@mock.patch.object(shade.OperatorCloud, 'get_endpoint')
@mock.patch.object(ironicclient.client, 'Client')
def test_ironic_noauth_selection_using_a_task(
self, mock_client, mock_endpoint):
"""Test noauth selection for Ironic in OperatorCloud
Utilize a task to trigger the client connection attempt
and evaluate if get_endpoint was called while the client
was still called.
"""
self.cloud_noauth.patch_machine('name', {})
self.assertFalse(mock_endpoint.called)
self.assertTrue(mock_client.called)
|
<commit_before><commit_msg>Add test of OperatorCloud auth_type=None
Ironic features a noauth mode which is intended for use in isolated
trusted environments. As this is not a normal use case for shade,
it is an incredibly important item to have a test for.
Change-Id: If86b9df238982d912105fb08dcd59c9c85b7de4a<commit_after>
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import ironicclient
import shade
from shade.tests import base
class TestShadeOperatorNoAuth(base.TestCase):
def setUp(self):
"""Setup Noauth OperatorCloud tests
Setup the test to utilize no authentication and an endpoint
        URL in the auth data. This permits testing of the basic
mechanism that enables Ironic noauth mode to be utilized with
Shade.
"""
super(TestShadeOperatorNoAuth, self).setUp()
self.cloud_noauth = shade.operator_cloud(
auth_type='None',
auth=dict(endpoint="http://localhost:6385")
)
@mock.patch.object(shade.OperatorCloud, 'get_endpoint')
@mock.patch.object(ironicclient.client, 'Client')
def test_ironic_noauth_selection_using_a_task(
self, mock_client, mock_endpoint):
"""Test noauth selection for Ironic in OperatorCloud
Utilize a task to trigger the client connection attempt
and evaluate if get_endpoint was called while the client
was still called.
"""
self.cloud_noauth.patch_machine('name', {})
self.assertFalse(mock_endpoint.called)
self.assertTrue(mock_client.called)
|
Add test of OperatorCloud auth_type=None
Ironic features a noauth mode which is intended for use in isolated
trusted environments. As this is not a normal use case for shade,
it is an incredibly important item to have a test for.
Change-Id: If86b9df238982d912105fb08dcd59c9c85b7de4a# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import ironicclient
import shade
from shade.tests import base
class TestShadeOperatorNoAuth(base.TestCase):
def setUp(self):
"""Setup Noauth OperatorCloud tests
Setup the test to utilize no authentication and an endpoint
        URL in the auth data. This permits testing of the basic
mechanism that enables Ironic noauth mode to be utilized with
Shade.
"""
super(TestShadeOperatorNoAuth, self).setUp()
self.cloud_noauth = shade.operator_cloud(
auth_type='None',
auth=dict(endpoint="http://localhost:6385")
)
@mock.patch.object(shade.OperatorCloud, 'get_endpoint')
@mock.patch.object(ironicclient.client, 'Client')
def test_ironic_noauth_selection_using_a_task(
self, mock_client, mock_endpoint):
"""Test noauth selection for Ironic in OperatorCloud
Utilize a task to trigger the client connection attempt
and evaluate if get_endpoint was called while the client
was still called.
"""
self.cloud_noauth.patch_machine('name', {})
self.assertFalse(mock_endpoint.called)
self.assertTrue(mock_client.called)
|
<commit_before><commit_msg>Add test of OperatorCloud auth_type=None
Ironic features a noauth mode which is intended for use in isolated
trusted environments. As this is not a normal use case for shade,
it is an incredibly important item to have a test for.
Change-Id: If86b9df238982d912105fb08dcd59c9c85b7de4a<commit_after># Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import ironicclient
import shade
from shade.tests import base
class TestShadeOperatorNoAuth(base.TestCase):
def setUp(self):
"""Setup Noauth OperatorCloud tests
Setup the test to utilize no authentication and an endpoint
        URL in the auth data. This permits testing of the basic
mechanism that enables Ironic noauth mode to be utilized with
Shade.
"""
super(TestShadeOperatorNoAuth, self).setUp()
self.cloud_noauth = shade.operator_cloud(
auth_type='None',
auth=dict(endpoint="http://localhost:6385")
)
@mock.patch.object(shade.OperatorCloud, 'get_endpoint')
@mock.patch.object(ironicclient.client, 'Client')
def test_ironic_noauth_selection_using_a_task(
self, mock_client, mock_endpoint):
"""Test noauth selection for Ironic in OperatorCloud
Utilize a task to trigger the client connection attempt
and evaluate if get_endpoint was called while the client
was still called.
"""
self.cloud_noauth.patch_machine('name', {})
self.assertFalse(mock_endpoint.called)
self.assertTrue(mock_client.called)
|
|
f7287bdf569374757880a70e4c2e3a69ed31295e
|
tests/test_cpan.py
|
tests/test_cpan.py
|
from tests.helper import ExternalVersionTestCase
class CPANTest(ExternalVersionTestCase):
def test_cpan(self):
self.assertEqual(self.sync_get_version("POE-Component-Server-HTTPServer", {"cpan": None}), "0.9.2")
|
Add a testcase for CPAN
|
Add a testcase for CPAN
|
Python
|
mit
|
lilydjwg/nvchecker
|
Add a testcase for CPAN
|
from tests.helper import ExternalVersionTestCase
class CPANTest(ExternalVersionTestCase):
def test_cpan(self):
self.assertEqual(self.sync_get_version("POE-Component-Server-HTTPServer", {"cpan": None}), "0.9.2")
|
<commit_before><commit_msg>Add a testcase for CPAN<commit_after>
|
from tests.helper import ExternalVersionTestCase
class CPANTest(ExternalVersionTestCase):
def test_cpan(self):
self.assertEqual(self.sync_get_version("POE-Component-Server-HTTPServer", {"cpan": None}), "0.9.2")
|
Add a testcase for CPANfrom tests.helper import ExternalVersionTestCase
class CPANTest(ExternalVersionTestCase):
def test_cpan(self):
self.assertEqual(self.sync_get_version("POE-Component-Server-HTTPServer", {"cpan": None}), "0.9.2")
|
<commit_before><commit_msg>Add a testcase for CPAN<commit_after>from tests.helper import ExternalVersionTestCase
class CPANTest(ExternalVersionTestCase):
def test_cpan(self):
self.assertEqual(self.sync_get_version("POE-Component-Server-HTTPServer", {"cpan": None}), "0.9.2")
|
|
a01d667946b65ca3ede55fa44a2c409cc129b737
|
misc/show-cairo-subpixel-behavior.py
|
misc/show-cairo-subpixel-behavior.py
|
import cairo
# this shows that even though we are using integer values
# for set_source_surface, the subpixel translation is
# capturing pixel values from adjacent pixels of the source
src = cairo.ImageSurface(cairo.FORMAT_RGB24, 60, 40)
cr = cairo.Context(src)
#cr.set_source_rgb(1, 0, 0)
#cr.rectangle(0, 0, 20, 40)
#cr.fill()
cr.set_source_rgb(0, 1, 0)
cr.rectangle(20, 0, 20, 40)
cr.fill()
#cr.set_source_rgb(0, 0, 1)
#cr.rectangle(40, 0, 20, 40)
#cr.fill()
src.write_to_png("src.png")
dst = cairo.ImageSurface(cairo.FORMAT_ARGB32, 400, 400)
cr = cairo.Context(dst)
cr.set_source_rgb(1, 1, 1)
cr.paint()
for z in range(0, 400):
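    # Each pass nudges the x-translation by 0.01 px while stepping down one
    # row; the accumulating subpixel offset shows cairo blending in colour
    # from neighbouring source pixels despite the integer source coordinates.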
cr.translate(0.01, 1)
cr.set_source_surface(src, -20, 0)
cr.rectangle(0, 0, 20, 40)
cr.fill()
dst.write_to_png("dst.png")
|
Add script to show what cairo is doing that is strange
|
Add script to show what cairo is doing that is strange
|
Python
|
lgpl-2.1
|
openslide/openslide,openslide/openslide,openslide/openslide,openslide/openslide
|
Add script to show what cairo is doing that is strange
|
import cairo
# this shows that even though we are using integer values
# for set_source_surface, the subpixel translation is
# capturing pixel values from adjacent pixels of the source
src = cairo.ImageSurface(cairo.FORMAT_RGB24, 60, 40)
cr = cairo.Context(src)
#cr.set_source_rgb(1, 0, 0)
#cr.rectangle(0, 0, 20, 40)
#cr.fill()
cr.set_source_rgb(0, 1, 0)
cr.rectangle(20, 0, 20, 40)
cr.fill()
#cr.set_source_rgb(0, 0, 1)
#cr.rectangle(40, 0, 20, 40)
#cr.fill()
src.write_to_png("src.png")
dst = cairo.ImageSurface(cairo.FORMAT_ARGB32, 400, 400)
cr = cairo.Context(dst)
cr.set_source_rgb(1, 1, 1)
cr.paint()
for z in range(0, 400):
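    # Each pass nudges the x-translation by 0.01 px while stepping down one
    # row; the accumulating subpixel offset shows cairo blending in colour
    # from neighbouring source pixels despite the integer source coordinates.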
cr.translate(0.01, 1)
cr.set_source_surface(src, -20, 0)
cr.rectangle(0, 0, 20, 40)
cr.fill()
dst.write_to_png("dst.png")
|
<commit_before><commit_msg>Add script to show what cairo is doing that is strange<commit_after>
|
import cairo
# this shows that even though we are using integer values
# for set_source_surface, the subpixel translation is
# capturing pixel values from adjacent pixels of the source
src = cairo.ImageSurface(cairo.FORMAT_RGB24, 60, 40)
cr = cairo.Context(src)
#cr.set_source_rgb(1, 0, 0)
#cr.rectangle(0, 0, 20, 40)
#cr.fill()
cr.set_source_rgb(0, 1, 0)
cr.rectangle(20, 0, 20, 40)
cr.fill()
#cr.set_source_rgb(0, 0, 1)
#cr.rectangle(40, 0, 20, 40)
#cr.fill()
src.write_to_png("src.png")
dst = cairo.ImageSurface(cairo.FORMAT_ARGB32, 400, 400)
cr = cairo.Context(dst)
cr.set_source_rgb(1, 1, 1)
cr.paint()
for z in range(0, 400):
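    # Each pass nudges the x-translation by 0.01 px while stepping down one
    # row; the accumulating subpixel offset shows cairo blending in colour
    # from neighbouring source pixels despite the integer source coordinates.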
cr.translate(0.01, 1)
cr.set_source_surface(src, -20, 0)
cr.rectangle(0, 0, 20, 40)
cr.fill()
dst.write_to_png("dst.png")
|
Add script to show what cairo is doing that is strangeimport cairo
# this shows that even though we are using integer values
# for set_source_surface, the subpixel translation is
# capturing pixel values from adjacent pixels of the source
src = cairo.ImageSurface(cairo.FORMAT_RGB24, 60, 40)
cr = cairo.Context(src)
#cr.set_source_rgb(1, 0, 0)
#cr.rectangle(0, 0, 20, 40)
#cr.fill()
cr.set_source_rgb(0, 1, 0)
cr.rectangle(20, 0, 20, 40)
cr.fill()
#cr.set_source_rgb(0, 0, 1)
#cr.rectangle(40, 0, 20, 40)
#cr.fill()
src.write_to_png("src.png")
dst = cairo.ImageSurface(cairo.FORMAT_ARGB32, 400, 400)
cr = cairo.Context(dst)
cr.set_source_rgb(1, 1, 1)
cr.paint()
for z in range(0, 400):
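    # Each pass nudges the x-translation by 0.01 px while stepping down one
    # row; the accumulating subpixel offset shows cairo blending in colour
    # from neighbouring source pixels despite the integer source coordinates.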
cr.translate(0.01, 1)
cr.set_source_surface(src, -20, 0)
cr.rectangle(0, 0, 20, 40)
cr.fill()
dst.write_to_png("dst.png")
|
<commit_before><commit_msg>Add script to show what cairo is doing that is strange<commit_after>import cairo
# this shows that even though we are using integer values
# for set_source_surface, the subpixel translation is
# capturing pixel values from adjacent pixels of the source
src = cairo.ImageSurface(cairo.FORMAT_RGB24, 60, 40)
cr = cairo.Context(src)
#cr.set_source_rgb(1, 0, 0)
#cr.rectangle(0, 0, 20, 40)
#cr.fill()
cr.set_source_rgb(0, 1, 0)
cr.rectangle(20, 0, 20, 40)
cr.fill()
#cr.set_source_rgb(0, 0, 1)
#cr.rectangle(40, 0, 20, 40)
#cr.fill()
src.write_to_png("src.png")
dst = cairo.ImageSurface(cairo.FORMAT_ARGB32, 400, 400)
cr = cairo.Context(dst)
cr.set_source_rgb(1, 1, 1)
cr.paint()
for z in range(0, 400):
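    # Each pass nudges the x-translation by 0.01 px while stepping down one
    # row; the accumulating subpixel offset shows cairo blending in colour
    # from neighbouring source pixels despite the integer source coordinates.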
cr.translate(0.01, 1)
cr.set_source_surface(src, -20, 0)
cr.rectangle(0, 0, 20, 40)
cr.fill()
dst.write_to_png("dst.png")
|
|
2e8728a0bfa6cac8f4a17c53ce762f82c4f60583
|
cherrypy/test/test_package.py
|
cherrypy/test/test_package.py
|
from jaraco.packaging import depends
def test_dependencies_no_namespaces():
"""
Until #1673 lands, ensure dependencies do not employ
namespace packages.
"""
deps = depends.load_dependencies('cherrypy')
names = map(package_name, traverse(deps))
assert not any(name.startswith('jaraco.') for name in names)
def package_name(dep):
name, sep, ver = dep['resolved'].partition(' ')
return name
def traverse(pkg):
yield pkg
for group in map(traverse, pkg.get('depends', [])):
for child in group:
yield child
|
Add test module, omitted from prior commit.
|
Add test module, omitted from prior commit.
|
Python
|
bsd-3-clause
|
cherrypy/cherrypy,Safihre/cherrypy,cherrypy/cherrypy,Safihre/cherrypy
|
Add test module, omitted from prior commit.
|
from jaraco.packaging import depends
def test_dependencies_no_namespaces():
"""
Until #1673 lands, ensure dependencies do not employ
namespace packages.
"""
deps = depends.load_dependencies('cherrypy')
names = map(package_name, traverse(deps))
assert not any(name.startswith('jaraco.') for name in names)
def package_name(dep):
name, sep, ver = dep['resolved'].partition(' ')
return name
def traverse(pkg):
yield pkg
for group in map(traverse, pkg.get('depends', [])):
for child in group:
yield child
|
<commit_before><commit_msg>Add test module, omitted from prior commit.<commit_after>
|
from jaraco.packaging import depends
def test_dependencies_no_namespaces():
"""
Until #1673 lands, ensure dependencies do not employ
namespace packages.
"""
deps = depends.load_dependencies('cherrypy')
names = map(package_name, traverse(deps))
assert not any(name.startswith('jaraco.') for name in names)
def package_name(dep):
name, sep, ver = dep['resolved'].partition(' ')
return name
def traverse(pkg):
yield pkg
for group in map(traverse, pkg.get('depends', [])):
for child in group:
yield child
|
Add test module, omitted from prior commit.from jaraco.packaging import depends
def test_dependencies_no_namespaces():
"""
Until #1673 lands, ensure dependencies do not employ
namespace packages.
"""
deps = depends.load_dependencies('cherrypy')
names = map(package_name, traverse(deps))
assert not any(name.startswith('jaraco.') for name in names)
def package_name(dep):
name, sep, ver = dep['resolved'].partition(' ')
return name
def traverse(pkg):
yield pkg
for group in map(traverse, pkg.get('depends', [])):
for child in group:
yield child
|
<commit_before><commit_msg>Add test module, omitted from prior commit.<commit_after>from jaraco.packaging import depends
def test_dependencies_no_namespaces():
"""
Until #1673 lands, ensure dependencies do not employ
namespace packages.
"""
deps = depends.load_dependencies('cherrypy')
names = map(package_name, traverse(deps))
assert not any(name.startswith('jaraco.') for name in names)
def package_name(dep):
name, sep, ver = dep['resolved'].partition(' ')
return name
def traverse(pkg):
yield pkg
for group in map(traverse, pkg.get('depends', [])):
for child in group:
yield child
|
|
b282434e61cd1a185806b985953d1543a57bca8f
|
skan/_testdata.py
|
skan/_testdata.py
|
import numpy as np
tinycycle = np.array([[0, 1, 0],
[1, 0, 1],
[0, 1, 0]], dtype=bool)
skeleton1 = np.array([[0, 1, 1, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 1, 0],
[1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 1, 1]], dtype=bool)
|
Add some tiny synthetic data for testing
|
Add some tiny synthetic data for testing
|
Python
|
bsd-3-clause
|
jni/skan
|
Add some tiny synthetic data for testing
|
import numpy as np
tinycycle = np.array([[0, 1, 0],
[1, 0, 1],
[0, 1, 0]], dtype=bool)
skeleton1 = np.array([[0, 1, 1, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 1, 0],
[1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 1, 1]], dtype=bool)
|
<commit_before><commit_msg>Add some tiny synthetic data for testing<commit_after>
|
import numpy as np
tinycycle = np.array([[0, 1, 0],
[1, 0, 1],
[0, 1, 0]], dtype=bool)
skeleton1 = np.array([[0, 1, 1, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 1, 0],
[1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 1, 1]], dtype=bool)
|
Add some tiny synthetic data for testingimport numpy as np
tinycycle = np.array([[0, 1, 0],
[1, 0, 1],
[0, 1, 0]], dtype=bool)
skeleton1 = np.array([[0, 1, 1, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 1, 0],
[1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 1, 1]], dtype=bool)
|
<commit_before><commit_msg>Add some tiny synthetic data for testing<commit_after>import numpy as np
tinycycle = np.array([[0, 1, 0],
[1, 0, 1],
[0, 1, 0]], dtype=bool)
skeleton1 = np.array([[0, 1, 1, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 1, 0],
[1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 1, 1]], dtype=bool)
|
|
ef294ced9a9500344e02b046a2d4dd3b9621229d
|
tests/t_utils/test_filters.py
|
tests/t_utils/test_filters.py
|
# -*- coding: utf-8 -*-
"""
tests.t_utils.test_filters
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2017 by The Stormrose Project team, see AUTHORS.
:license: MIT License, see LICENSE for details.
"""
import os
from unittest import TestCase
from xobox.utils import filters
class TestXoboxUtilsFilters(TestCase):
"""
Unit tests for :py:mod:`xobox.utils.filters`
"""
def test_01(self):
"""
Test Case 01:
Detect test modules in current path.
Test is passed if the returned list matches with the expected result.
"""
test_path = os.path.dirname(os.path.realpath(__file__))
result = []
expected = [
'test_compat.py',
'test_convert.py',
'test_dynamic.py',
'test_filters.py',
'test_loader.py',
'test_singleton.py',
'test_termcolor.py',
'test_timer.py',
'test_version.py'
]
for root, dirs, files in os.walk(test_path):
result += list(filter(filters.files, files))
self.assertListEqual(result, expected)
|
Add unit test for xobox.utils.filters.file
|
Add unit test for xobox.utils.filters.file
|
Python
|
mit
|
stormrose-va/xobox
|
Add unit test for xobox.utils.filters.file
|
# -*- coding: utf-8 -*-
"""
tests.t_utils.test_filters
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2017 by The Stormrose Project team, see AUTHORS.
:license: MIT License, see LICENSE for details.
"""
import os
from unittest import TestCase
from xobox.utils import filters
class TestXoboxUtilsFilters(TestCase):
"""
Unit tests for :py:mod:`xobox.utils.filters`
"""
def test_01(self):
"""
Test Case 01:
Detect test modules in current path.
Test is passed if the returned list matches with the expected result.
"""
test_path = os.path.dirname(os.path.realpath(__file__))
result = []
expected = [
'test_compat.py',
'test_convert.py',
'test_dynamic.py',
'test_filters.py',
'test_loader.py',
'test_singleton.py',
'test_termcolor.py',
'test_timer.py',
'test_version.py'
]
for root, dirs, files in os.walk(test_path):
result += list(filter(filters.files, files))
self.assertListEqual(result, expected)
|
<commit_before><commit_msg>Add unit test for xobox.utils.filters.file<commit_after>
|
# -*- coding: utf-8 -*-
"""
tests.t_utils.test_filters
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2017 by The Stormrose Project team, see AUTHORS.
:license: MIT License, see LICENSE for details.
"""
import os
from unittest import TestCase
from xobox.utils import filters
class TestXoboxUtilsFilters(TestCase):
"""
Unit tests for :py:mod:`xobox.utils.filters`
"""
def test_01(self):
"""
Test Case 01:
Detect test modules in current path.
Test is passed if the returned list matches with the expected result.
"""
test_path = os.path.dirname(os.path.realpath(__file__))
result = []
expected = [
'test_compat.py',
'test_convert.py',
'test_dynamic.py',
'test_filters.py',
'test_loader.py',
'test_singleton.py',
'test_termcolor.py',
'test_timer.py',
'test_version.py'
]
for root, dirs, files in os.walk(test_path):
result += list(filter(filters.files, files))
self.assertListEqual(result, expected)
|
Add unit test for xobox.utils.filters.file# -*- coding: utf-8 -*-
"""
tests.t_utils.test_filters
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2017 by The Stormrose Project team, see AUTHORS.
:license: MIT License, see LICENSE for details.
"""
import os
from unittest import TestCase
from xobox.utils import filters
class TestXoboxUtilsFilters(TestCase):
"""
Unit tests for :py:mod:`xobox.utils.filters`
"""
def test_01(self):
"""
Test Case 01:
Detect test modules in current path.
Test is passed if the returned list matches with the expected result.
"""
test_path = os.path.dirname(os.path.realpath(__file__))
result = []
expected = [
'test_compat.py',
'test_convert.py',
'test_dynamic.py',
'test_filters.py',
'test_loader.py',
'test_singleton.py',
'test_termcolor.py',
'test_timer.py',
'test_version.py'
]
for root, dirs, files in os.walk(test_path):
result += list(filter(filters.files, files))
self.assertListEqual(result, expected)
|
<commit_before><commit_msg>Add unit test for xobox.utils.filters.file<commit_after># -*- coding: utf-8 -*-
"""
tests.t_utils.test_filters
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2017 by The Stormrose Project team, see AUTHORS.
:license: MIT License, see LICENSE for details.
"""
import os
from unittest import TestCase
from xobox.utils import filters
class TestXoboxUtilsFilters(TestCase):
"""
Unit tests for :py:mod:`xobox.utils.filters`
"""
def test_01(self):
"""
Test Case 01:
Detect test modules in current path.
Test is passed if the returned list matches with the expected result.
"""
test_path = os.path.dirname(os.path.realpath(__file__))
result = []
expected = [
'test_compat.py',
'test_convert.py',
'test_dynamic.py',
'test_filters.py',
'test_loader.py',
'test_singleton.py',
'test_termcolor.py',
'test_timer.py',
'test_version.py'
]
for root, dirs, files in os.walk(test_path):
result += list(filter(filters.files, files))
self.assertListEqual(result, expected)
|
|
b2158dcd6aaae9c44503b509ddcb5187ea37bdba
|
api/production_pending.py
|
api/production_pending.py
|
from constants import *
import sys, time, datetime
from scraper import Scraper
from dateutil import tz
from execute_query import execute_query
try:
guild_ids = str(sys.argv[1]).split(',')
print '[INFO] [{0}] - Not using all guild IDs, but only the following: {1}'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),', '.join(guild_ids))
except Exception:
guild_ids = False
start_time = time.time()
keep_going = True
while keep_going:
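    # Poll the pending_changes queue: sleep when it is empty, otherwise
    # refresh the queued guilds and clear them once the scraper run finishes.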
guild_ids = [str(i[0]) for i in execute_query('SELECT guild_id FROM pending_changes')]
if not guild_ids:
print 'No pending guilds. Sleeping for one minute.'
time.sleep(60)
else:
print 'Refreshing {0} pending guilds.'.format(len(guild_ids))
guilds = Scraper('production',guild_ids,start_time,'tornado')
keep_going = guilds.run()
execute_query('DELETE FROM pending_changes WHERE guild_id IN ({0})'.format(','.join(guild_ids)))
if time.time() - start_time > MAXIMUM_RUNTIME: keep_going = False
print '[INFO] [{0}] - {1}. Aborting now.'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),'Reached the maximum runtime' if (time.time() - start_time >= MAXIMUM_RUNTIME) else 'Done')
|
Add script to process pending changes separately from the regular refresh cycle
|
Add script to process pending changes separately from the regular refresh cycle
|
Python
|
mit
|
Sheday/GuildAudit,Sheday/GuildAudit
|
Add script to process pending changes separately from the regular refresh cycle
|
from constants import *
import sys, time, datetime
from scraper import Scraper
from dateutil import tz
from execute_query import execute_query
try:
guild_ids = str(sys.argv[1]).split(',')
print '[INFO] [{0}] - Not using all guild IDs, but only the following: {1}'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),', '.join(guild_ids))
except Exception:
guild_ids = False
start_time = time.time()
keep_going = True
while keep_going:
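    # Poll the pending_changes queue: sleep when it is empty, otherwise
    # refresh the queued guilds and clear them once the scraper run finishes.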
guild_ids = [str(i[0]) for i in execute_query('SELECT guild_id FROM pending_changes')]
if not guild_ids:
print 'No pending guilds. Sleeping for one minute.'
time.sleep(60)
else:
print 'Refreshing {0} pending guilds.'.format(len(guild_ids))
guilds = Scraper('production',guild_ids,start_time,'tornado')
keep_going = guilds.run()
execute_query('DELETE FROM pending_changes WHERE guild_id IN ({0})'.format(','.join(guild_ids)))
if time.time() - start_time > MAXIMUM_RUNTIME: keep_going = False
print '[INFO] [{0}] - {1}. Aborting now.'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),'Reached the maximum runtime' if (time.time() - start_time >= MAXIMUM_RUNTIME) else 'Done')
|
<commit_before><commit_msg>Add script to process pending changes separately from the regular refresh cycle<commit_after>
|
from constants import *
import sys, time, datetime
from scraper import Scraper
from dateutil import tz
from execute_query import execute_query
try:
guild_ids = str(sys.argv[1]).split(',')
print '[INFO] [{0}] - Not using all guild IDs, but only the following: {1}'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),', '.join(guild_ids))
except Exception:
guild_ids = False
start_time = time.time()
keep_going = True
while keep_going:
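    # Poll the pending_changes queue: sleep when it is empty, otherwise
    # refresh the queued guilds and clear them once the scraper run finishes.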
guild_ids = [str(i[0]) for i in execute_query('SELECT guild_id FROM pending_changes')]
if not guild_ids:
print 'No pending guilds. Sleeping for one minute.'
time.sleep(60)
else:
print 'Refreshing {0} pending guilds.'.format(len(guild_ids))
guilds = Scraper('production',guild_ids,start_time,'tornado')
keep_going = guilds.run()
execute_query('DELETE FROM pending_changes WHERE guild_id IN ({0})'.format(','.join(guild_ids)))
if time.time() - start_time > MAXIMUM_RUNTIME: keep_going = False
print '[INFO] [{0}] - {1}. Aborting now.'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),'Reached the maximum runtime' if (time.time() - start_time >= MAXIMUM_RUNTIME) else 'Done')
|
Add script to process pending changes separately from the regular refresh cyclefrom constants import *
import sys, time, datetime
from scraper import Scraper
from dateutil import tz
from execute_query import execute_query
try:
guild_ids = str(sys.argv[1]).split(',')
print '[INFO] [{0}] - Not using all guild IDs, but only the following: {1}'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),', '.join(guild_ids))
except Exception:
guild_ids = False
start_time = time.time()
keep_going = True
while keep_going:
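    # Poll the pending_changes queue: sleep when it is empty, otherwise
    # refresh the queued guilds and clear them once the scraper run finishes.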
guild_ids = [str(i[0]) for i in execute_query('SELECT guild_id FROM pending_changes')]
if not guild_ids:
print 'No pending guilds. Sleeping for one minute.'
time.sleep(60)
else:
print 'Refreshing {0} pending guilds.'.format(len(guild_ids))
guilds = Scraper('production',guild_ids,start_time,'tornado')
keep_going = guilds.run()
execute_query('DELETE FROM pending_changes WHERE guild_id IN ({0})'.format(','.join(guild_ids)))
if time.time() - start_time > MAXIMUM_RUNTIME: keep_going = False
print '[INFO] [{0}] - {1}. Aborting now.'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),'Reached the maximum runtime' if (time.time() - start_time >= MAXIMUM_RUNTIME) else 'Done')
|
<commit_before><commit_msg>Add script to process pending changes separately from the regular refresh cycle<commit_after>from constants import *
import sys, time, datetime
from scraper import Scraper
from dateutil import tz
from execute_query import execute_query
try:
guild_ids = str(sys.argv[1]).split(',')
print '[INFO] [{0}] - Not using all guild IDs, but only the following: {1}'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),', '.join(guild_ids))
except Exception:
guild_ids = False
start_time = time.time()
keep_going = True
while keep_going:
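    # Poll the pending_changes queue: sleep when it is empty, otherwise
    # refresh the queued guilds and clear them once the scraper run finishes.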
guild_ids = [str(i[0]) for i in execute_query('SELECT guild_id FROM pending_changes')]
if not guild_ids:
print 'No pending guilds. Sleeping for one minute.'
time.sleep(60)
else:
print 'Refreshing {0} pending guilds.'.format(len(guild_ids))
guilds = Scraper('production',guild_ids,start_time,'tornado')
keep_going = guilds.run()
execute_query('DELETE FROM pending_changes WHERE guild_id IN ({0})'.format(','.join(guild_ids)))
if time.time() - start_time > MAXIMUM_RUNTIME: keep_going = False
print '[INFO] [{0}] - {1}. Aborting now.'.format(datetime.datetime.utcnow().replace(tzinfo=tz.gettz('UTC')).astimezone(tz.gettz(TIME_ZONE)).strftime('%d-%m %H:%M:%S'),'Reached the maximum runtime' if (time.time() - start_time >= MAXIMUM_RUNTIME) else 'Done')
|
|
173423ee7f43fb7a33db847e2ef3abb34317944e
|
tests/test_decorators.py
|
tests/test_decorators.py
|
from dtest import *
from dtest.util import *
@skip
def test_skip():
pass
@failing
def test_failing():
pass
@attr(attr1=1, attr2=2)
def test_attr():
pass
@depends(test_skip, test_failing, test_attr)
def test_depends():
pass
class DecoratorTestException(Exception):
pass
@raises(DecoratorTestException)
def test_raises():
raise DecoratorTestException()
@timed(1)
def test_timed():
pass
class TestDecorators(DTestCase):
@istest
def skip(self):
# Verify that skip is true...
assert_true(test_skip.skip)
# Verify that it's false on something else
assert_false(test_failing.skip)
@istest
def failing(self):
# Verify that failing is true...
assert_true(test_failing.failing)
# Verify that it's false on something else
assert_false(test_skip.failing)
@istest
def attr(self):
# Verify that the attributes are set as expected
assert_equal(test_attr.attr1, 1)
assert_equal(test_attr.attr2, 2)
@istest
def depends(self):
# Part 1: Verify that test_depends() is dependent on
# test_skip(), test_failing(), and test_attr()
assert_in(test_skip, test_depends.dependencies)
assert_in(test_failing, test_depends.dependencies)
assert_in(test_attr, test_depends.dependencies)
        # Part 2: Verify that test_depends() is in the dependents set
# of test_skip(), test_failing(), and test_attr()
assert_in(test_depends, test_skip.dependents)
assert_in(test_depends, test_failing.dependents)
assert_in(test_depends, test_attr.dependents)
@istest
def raises(self):
# Verify that the set of expected exceptions is as expected
assert_set_equal(test_raises.raises, set([DecoratorTestException]))
# Verify that it's the empty set on something else
assert_set_equal(test_timed.raises, set())
@istest
def timed(self):
# Verify that the timeout is set properly
assert_equal(test_timed.timeout, 1)
# Verify that it's None on something else
assert_is_none(test_raises.timeout)
|
Add tests to verify the proper functioning of decorators
|
Add tests to verify the proper functioning of decorators
|
Python
|
apache-2.0
|
klmitch/dtest,klmitch/dtest
|
Add tests to verify the proper functioning of decorators
|
from dtest import *
from dtest.util import *
@skip
def test_skip():
pass
@failing
def test_failing():
pass
@attr(attr1=1, attr2=2)
def test_attr():
pass
@depends(test_skip, test_failing, test_attr)
def test_depends():
pass
class DecoratorTestException(Exception):
pass
@raises(DecoratorTestException)
def test_raises():
raise DecoratorTestException()
@timed(1)
def test_timed():
pass
class TestDecorators(DTestCase):
@istest
def skip(self):
# Verify that skip is true...
assert_true(test_skip.skip)
# Verify that it's false on something else
assert_false(test_failing.skip)
@istest
def failing(self):
# Verify that failing is true...
assert_true(test_failing.failing)
# Verify that it's false on something else
assert_false(test_skip.failing)
@istest
def attr(self):
# Verify that the attributes are set as expected
assert_equal(test_attr.attr1, 1)
assert_equal(test_attr.attr2, 2)
@istest
def depends(self):
# Part 1: Verify that test_depends() is dependent on
# test_skip(), test_failing(), and test_attr()
assert_in(test_skip, test_depends.dependencies)
assert_in(test_failing, test_depends.dependencies)
assert_in(test_attr, test_depends.dependencies)
        # Part 2: Verify that test_depends() is in the dependents set
# of test_skip(), test_failing(), and test_attr()
assert_in(test_depends, test_skip.dependents)
assert_in(test_depends, test_failing.dependents)
assert_in(test_depends, test_attr.dependents)
@istest
def raises(self):
# Verify that the set of expected exceptions is as expected
assert_set_equal(test_raises.raises, set([DecoratorTestException]))
# Verify that it's the empty set on something else
assert_set_equal(test_timed.raises, set())
@istest
def timed(self):
# Verify that the timeout is set properly
assert_equal(test_timed.timeout, 1)
# Verify that it's None on something else
assert_is_none(test_raises.timeout)
|
<commit_before><commit_msg>Add tests to verify the proper functioning of decorators<commit_after>
|
from dtest import *
from dtest.util import *
@skip
def test_skip():
pass
@failing
def test_failing():
pass
@attr(attr1=1, attr2=2)
def test_attr():
pass
@depends(test_skip, test_failing, test_attr)
def test_depends():
pass
class DecoratorTestException(Exception):
pass
@raises(DecoratorTestException)
def test_raises():
raise DecoratorTestException()
@timed(1)
def test_timed():
pass
class TestDecorators(DTestCase):
@istest
def skip(self):
# Verify that skip is true...
assert_true(test_skip.skip)
# Verify that it's false on something else
assert_false(test_failing.skip)
@istest
def failing(self):
# Verify that failing is true...
assert_true(test_failing.failing)
# Verify that it's false on something else
assert_false(test_skip.failing)
@istest
def attr(self):
# Verify that the attributes are set as expected
assert_equal(test_attr.attr1, 1)
assert_equal(test_attr.attr2, 2)
@istest
def depends(self):
# Part 1: Verify that test_depends() is dependent on
# test_skip(), test_failing(), and test_attr()
assert_in(test_skip, test_depends.dependencies)
assert_in(test_failing, test_depends.dependencies)
assert_in(test_attr, test_depends.dependencies)
        # Part 2: Verify that test_depends() is in the dependents set
# of test_skip(), test_failing(), and test_attr()
assert_in(test_depends, test_skip.dependents)
assert_in(test_depends, test_failing.dependents)
assert_in(test_depends, test_attr.dependents)
@istest
def raises(self):
# Verify that the set of expected exceptions is as expected
assert_set_equal(test_raises.raises, set([DecoratorTestException]))
# Verify that it's the empty set on something else
assert_set_equal(test_timed.raises, set())
@istest
def timed(self):
# Verify that the timeout is set properly
assert_equal(test_timed.timeout, 1)
# Verify that it's None on something else
assert_is_none(test_raises.timeout)
|
Add tests to verify the proper functioning of decoratorsfrom dtest import *
from dtest.util import *
@skip
def test_skip():
pass
@failing
def test_failing():
pass
@attr(attr1=1, attr2=2)
def test_attr():
pass
@depends(test_skip, test_failing, test_attr)
def test_depends():
pass
class DecoratorTestException(Exception):
pass
@raises(DecoratorTestException)
def test_raises():
raise DecoratorTestException()
@timed(1)
def test_timed():
pass
class TestDecorators(DTestCase):
@istest
def skip(self):
# Verify that skip is true...
assert_true(test_skip.skip)
# Verify that it's false on something else
assert_false(test_failing.skip)
@istest
def failing(self):
# Verify that failing is true...
assert_true(test_failing.failing)
# Verify that it's false on something else
assert_false(test_skip.failing)
@istest
def attr(self):
# Verify that the attributes are set as expected
assert_equal(test_attr.attr1, 1)
assert_equal(test_attr.attr2, 2)
@istest
def depends(self):
# Part 1: Verify that test_depends() is dependent on
# test_skip(), test_failing(), and test_attr()
assert_in(test_skip, test_depends.dependencies)
assert_in(test_failing, test_depends.dependencies)
assert_in(test_attr, test_depends.dependencies)
        # Part 2: Verify that test_depends() is in the dependents set
# of test_skip(), test_failing(), and test_attr()
assert_in(test_depends, test_skip.dependents)
assert_in(test_depends, test_failing.dependents)
assert_in(test_depends, test_attr.dependents)
@istest
def raises(self):
# Verify that the set of expected exceptions is as expected
assert_set_equal(test_raises.raises, set([DecoratorTestException]))
# Verify that it's the empty set on something else
assert_set_equal(test_timed.raises, set())
@istest
def timed(self):
# Verify that the timeout is set properly
assert_equal(test_timed.timeout, 1)
# Verify that it's None on something else
assert_is_none(test_raises.timeout)
|
<commit_before><commit_msg>Add tests to verify the proper functioning of decorators<commit_after>from dtest import *
from dtest.util import *
@skip
def test_skip():
pass
@failing
def test_failing():
pass
@attr(attr1=1, attr2=2)
def test_attr():
pass
@depends(test_skip, test_failing, test_attr)
def test_depends():
pass
class DecoratorTestException(Exception):
pass
@raises(DecoratorTestException)
def test_raises():
raise DecoratorTestException()
@timed(1)
def test_timed():
pass
class TestDecorators(DTestCase):
@istest
def skip(self):
# Verify that skip is true...
assert_true(test_skip.skip)
# Verify that it's false on something else
assert_false(test_failing.skip)
@istest
def failing(self):
# Verify that failing is true...
assert_true(test_failing.failing)
# Verify that it's false on something else
assert_false(test_skip.failing)
@istest
def attr(self):
# Verify that the attributes are set as expected
assert_equal(test_attr.attr1, 1)
assert_equal(test_attr.attr2, 2)
@istest
def depends(self):
# Part 1: Verify that test_depends() is dependent on
# test_skip(), test_failing(), and test_attr()
assert_in(test_skip, test_depends.dependencies)
assert_in(test_failing, test_depends.dependencies)
assert_in(test_attr, test_depends.dependencies)
        # Part 2: Verify that test_depends() is in the dependents set
# of test_skip(), test_failing(), and test_attr()
assert_in(test_depends, test_skip.dependents)
assert_in(test_depends, test_failing.dependents)
assert_in(test_depends, test_attr.dependents)
@istest
def raises(self):
# Verify that the set of expected exceptions is as expected
assert_set_equal(test_raises.raises, set([DecoratorTestException]))
# Verify that it's the empty set on something else
assert_set_equal(test_timed.raises, set())
@istest
def timed(self):
# Verify that the timeout is set properly
assert_equal(test_timed.timeout, 1)
# Verify that it's None on something else
assert_is_none(test_raises.timeout)
|
|
1cbd6aedeb3b7c3b867f0927450154f386de5664
|
tests/test_playsongin.py
|
tests/test_playsongin.py
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying songs in an album"""
results = run_filter('playsongin', 'beatles For SALE')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_trim_whitespace():
"""should trim whitespace when querying songs in an album"""
results = run_filter('playsongin', ' beatles for sale ')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_partial():
"""should match partial queries when querying songs in an album"""
results = run_filter('playsongin', 'bea sal')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_title():
"""songin result should display song name in title"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_subtitle():
"""songin result should display artist name in subtitle"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['subtitle'], 'The Beatles')
def test_result_valid():
"""songin result should be actionable"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""songin result should display correct artwork as icon"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
|
Add tests for playsongin filter
|
Add tests for playsongin filter
|
Python
|
mit
|
caleb531/play-song,caleb531/play-song
|
Add tests for playsongin filter
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying songs in an album"""
results = run_filter('playsongin', 'beatles For SALE')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_trim_whitespace():
"""should trim whitespace when querying songs in an album"""
results = run_filter('playsongin', ' beatles for sale ')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_partial():
"""should match partial queries when querying songs in an album"""
results = run_filter('playsongin', 'bea sal')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_title():
"""songin result should display song name in title"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_subtitle():
"""songin result should display artist name in subtitle"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['subtitle'], 'The Beatles')
def test_result_valid():
"""songin result should be actionable"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""songin result should display correct artwork as icon"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
|
<commit_before><commit_msg>Add tests for playsongin filter<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying songs in an album"""
results = run_filter('playsongin', 'beatles For SALE')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_trim_whitespace():
"""should trim whitespace when querying songs in an album"""
results = run_filter('playsongin', ' beatles for sale ')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_partial():
"""should match partial queries when querying songs in an album"""
results = run_filter('playsongin', 'bea sal')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_title():
"""songin result should display song name in title"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_subtitle():
"""songin result should display artist name in subtitle"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['subtitle'], 'The Beatles')
def test_result_valid():
"""songin result should be actionable"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""songin result should display correct artwork as icon"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
|
Add tests for playsongin filter#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying songs in an album"""
results = run_filter('playsongin', 'beatles For SALE')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_trim_whitespace():
"""should trim whitespace when querying songs in an album"""
results = run_filter('playsongin', ' beatles for sale ')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_partial():
"""should match partial queries when querying songs in an album"""
results = run_filter('playsongin', 'bea sal')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_title():
"""songin result should display song name in title"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_subtitle():
"""songin result should display artist name in subtitle"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['subtitle'], 'The Beatles')
def test_result_valid():
"""songin result should be actionable"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""songin result should display correct artwork as icon"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
|
<commit_before><commit_msg>Add tests for playsongin filter<commit_after>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying songs in an album"""
results = run_filter('playsongin', 'beatles For SALE')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_trim_whitespace():
"""should trim whitespace when querying songs in an album"""
results = run_filter('playsongin', ' beatles for sale ')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_query_partial():
"""should match partial queries when querying songs in an album"""
results = run_filter('playsongin', 'bea sal')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_title():
"""songin result should display song name in title"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['title'], 'No Reply')
def test_result_subtitle():
"""songin result should display artist name in subtitle"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['subtitle'], 'The Beatles')
def test_result_valid():
"""songin result should be actionable"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""songin result should display correct artwork as icon"""
results = run_filter('playsongin', 'beatles for sale')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
|
|
af94b633f11cc7ea550dd52a165989370137a27d
|
paretoer.py
|
paretoer.py
|
#!/usr/bin/env python
import sys
import csv
import string
class CSVParetoer():
"""
This class represents an instance of paretoization. In short,
it operates on a CSV file and produces another file with a list
of tags to be applied to that file.
"""
def __init__(self, input_path, output_path):
self.input_file = open(input_path, "r")
self.output_file = open(output_path, "w")
self.counts = {}
def update_counts(self, str_to_count):
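        # Strip punctuation, split on whitespace, and tally each word
        # case-insensitively (counts are keyed on the upper-cased word).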
zero_punctuation = str_to_count.translate(None, string.punctuation)
word_list = zero_punctuation.split()
for word in word_list:
word = word.upper()
if word in self.counts:
self.counts[word] += 1
else:
self.counts[word] = 1
def pareto(self, column_list):
        # Sample the file once so the dialect sniffer and the header check
        # inspect the same bytes, then rewind before parsing rows.
        sample = self.input_file.read(1024)
        self.input_file.seek(0)
        dialect = csv.Sniffer().sniff(sample)
        reader = csv.reader(self.input_file, dialect)
        if csv.Sniffer().has_header(sample):
            header = reader.next()
            print "Reading from these columns:"
            for number in column_list:
                print header[number]
        for line in reader:
            for number in column_list:
                self.update_counts(line[number])
def write_counts(self):
count_tuples = self.counts.iteritems()
sorted_tuples = sorted(count_tuples, key=lambda word: word[1])
for word, count in sorted_tuples:
            self.output_file.write("%s: %s" % (word, count))
self.output_file.write("\n")
if __name__ == '__main__':
input_path = sys.argv[1]
output_path = sys.argv[2]
    column_string = raw_input(
        'Which columns would you like to read from? Numbers separated by commas, please.\n')
    column_list = [int(x) for x in column_string.split(",")]
paretoer = CSVParetoer(input_path, output_path)
paretoer.pareto(column_list)
paretoer.write_counts()
|
Add main file. Initial functionality, no error checking.
|
Add main file. Initial functionality, no error checking.
|
Python
|
mit
|
sposterkil/pareto-project
|
Add main file. Initial functionality, no error checking.
|
#!/usr/bin/env python
import sys
import csv
import string
class CSVParetoer():
"""
This class represents an instance of paretoization. In short,
it operates on a CSV file and produces another file with a list
of tags to be applied to that file.
"""
def __init__(self, input_path, output_path):
self.input_file = open(input_path, "r")
self.output_file = open(output_path, "w")
self.counts = {}
def update_counts(self, str_to_count):
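        # Strip punctuation, split on whitespace, and tally each word
        # case-insensitively (counts are keyed on the upper-cased word).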
zero_punctuation = str_to_count.translate(None, string.punctuation)
word_list = zero_punctuation.split()
for word in word_list:
word = word.upper()
if word in self.counts:
self.counts[word] += 1
else:
self.counts[word] = 1
def pareto(self, column_list):
        # Sample the file once so the dialect sniffer and the header check
        # inspect the same bytes, then rewind before parsing rows.
        sample = self.input_file.read(1024)
        self.input_file.seek(0)
        dialect = csv.Sniffer().sniff(sample)
        reader = csv.reader(self.input_file, dialect)
        if csv.Sniffer().has_header(sample):
            header = reader.next()
            print "Reading from these columns:"
            for number in column_list:
                print header[number]
        for line in reader:
            for number in column_list:
                self.update_counts(line[number])
def write_counts(self):
count_tuples = self.counts.iteritems()
sorted_tuples = sorted(count_tuples, key=lambda word: word[1])
for word, count in sorted_tuples:
            self.output_file.write("%s: %s" % (word, count))
self.output_file.write("\n")
if __name__ == '__main__':
input_path = sys.argv[1]
output_path = sys.argv[2]
    column_string = raw_input(
        'Which columns would you like to read from? Numbers separated by commas, please.\n')
    column_list = [int(x) for x in column_string.split(",")]
paretoer = CSVParetoer(input_path, output_path)
paretoer.pareto(column_list)
paretoer.write_counts()
|
<commit_before><commit_msg>Add main file. Initial functionality, no error checking.<commit_after>
|
#!/usr/bin/env python
import sys
import csv
import string
class CSVParetoer():
"""
This class represents an instance of paretoization. In short,
it operates on a CSV file and produces another file with a list
of tags to be applied to that file.
"""
def __init__(self, input_path, output_path):
self.input_file = open(input_path, "r")
self.output_file = open(output_path, "w")
self.counts = {}
def update_counts(self, str_to_count):
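        # Strip punctuation, split on whitespace, and tally each word
        # case-insensitively (counts are keyed on the upper-cased word).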
zero_punctuation = str_to_count.translate(None, string.punctuation)
word_list = zero_punctuation.split()
for word in word_list:
word = word.upper()
if word in self.counts:
self.counts[word] += 1
else:
self.counts[word] = 1
def pareto(self, column_list):
        # Sample the file once so the dialect sniffer and the header check
        # inspect the same bytes, then rewind before parsing rows.
        sample = self.input_file.read(1024)
        self.input_file.seek(0)
        dialect = csv.Sniffer().sniff(sample)
        reader = csv.reader(self.input_file, dialect)
        if csv.Sniffer().has_header(sample):
            header = reader.next()
            print "Reading from these columns:"
            for number in column_list:
                print header[number]
        for line in reader:
            for number in column_list:
                self.update_counts(line[number])
def write_counts(self):
count_tuples = self.counts.iteritems()
sorted_tuples = sorted(count_tuples, key=lambda word: word[1])
for word, count in sorted_tuples:
            self.output_file.write("%s: %s" % (word, count))
self.output_file.write("\n")
if __name__ == '__main__':
input_path = sys.argv[1]
output_path = sys.argv[2]
    column_string = raw_input(
        'Which columns would you like to read from? Numbers separated by commas, please.\n')
    column_list = [int(x) for x in column_string.split(",")]
paretoer = CSVParetoer(input_path, output_path)
paretoer.pareto(column_list)
paretoer.write_counts()
|
Add main file. Initial functionality, no error checking.#!/usr/bin/env python
import sys
import csv
import string
class CSVParetoer():
"""
This class represents an instance of paretoization. In short,
it operates on a CSV file and produces another file with a list
of tags to be applied to that file.
"""
def __init__(self, input_path, output_path):
self.input_file = open(input_path, "r")
self.output_file = open(output_path, "w")
self.counts = {}
def update_counts(self, str_to_count):
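        # Strip punctuation, split on whitespace, and tally each word
        # case-insensitively (counts are keyed on the upper-cased word).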
zero_punctuation = str_to_count.translate(None, string.punctuation)
word_list = zero_punctuation.split()
for word in word_list:
word = word.upper()
if word in self.counts:
self.counts[word] += 1
else:
self.counts[word] = 1
def pareto(self, column_list):
        # Sample the file once so the dialect sniffer and the header check
        # inspect the same bytes, then rewind before parsing rows.
        sample = self.input_file.read(1024)
        self.input_file.seek(0)
        dialect = csv.Sniffer().sniff(sample)
        reader = csv.reader(self.input_file, dialect)
        if csv.Sniffer().has_header(sample):
            header = reader.next()
            print "Reading from these columns:"
            for number in column_list:
                print header[number]
        for line in reader:
            for number in column_list:
                self.update_counts(line[number])
def write_counts(self):
count_tuples = self.counts.iteritems()
sorted_tuples = sorted(count_tuples, key=lambda word: word[1])
for word, count in sorted_tuples:
            self.output_file.write("%s: %s" % (word, count))
self.output_file.write("\n")
if __name__ == '__main__':
input_path = sys.argv[1]
output_path = sys.argv[2]
    column_string = raw_input(
        'Which columns would you like to read from? Numbers separated by commas, please.\n')
    column_list = [int(x) for x in column_string.split(",")]
paretoer = CSVParetoer(input_path, output_path)
paretoer.pareto(column_list)
paretoer.write_counts()
|
<commit_before><commit_msg>Add main file. Initial functionality, no error checking.<commit_after>#!/usr/bin/env python
import sys
import csv
import string
class CSVParetoer():
"""
This class represents an instance of paretoization. In short,
it operates on a CSV file and produces another file with a list
of tags to be applied to that file.
"""
def __init__(self, input_path, output_path):
self.input_file = open(input_path, "r")
self.output_file = open(output_path, "w")
self.counts = {}
def update_counts(self, str_to_count):
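        # Strip punctuation, split on whitespace, and tally each word
        # case-insensitively (counts are keyed on the upper-cased word).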
zero_punctuation = str_to_count.translate(None, string.punctuation)
word_list = zero_punctuation.split()
for word in word_list:
word = word.upper()
if word in self.counts:
self.counts[word] += 1
else:
self.counts[word] = 1
def pareto(self, column_list):
        # Sample the file once so the dialect sniffer and the header check
        # inspect the same bytes, then rewind before parsing rows.
        sample = self.input_file.read(1024)
        self.input_file.seek(0)
        dialect = csv.Sniffer().sniff(sample)
        reader = csv.reader(self.input_file, dialect)
        if csv.Sniffer().has_header(sample):
            header = reader.next()
            print "Reading from these columns:"
            for number in column_list:
                print header[number]
        for line in reader:
            for number in column_list:
                self.update_counts(line[number])
def write_counts(self):
count_tuples = self.counts.iteritems()
sorted_tuples = sorted(count_tuples, key=lambda word: word[1])
for word, count in sorted_tuples:
            self.output_file.write("%s: %s" % (word, count))
self.output_file.write("\n")
if __name__ == '__main__':
input_path = sys.argv[1]
output_path = sys.argv[2]
    column_string = raw_input(
        'Which columns would you like to read from? Numbers separated by commas, please.\n')
    column_list = [int(x) for x in column_string.split(",")]
paretoer = CSVParetoer(input_path, output_path)
paretoer.pareto(column_list)
paretoer.write_counts()
|
|
bb467be1398e43becde8a3cb2494ba0c81ec40e1
|
kpi/ona_db_routers.py
|
kpi/ona_db_routers.py
|
from __future__ import unicode_literals, absolute_import
from .constants import SHADOW_MODEL_APP_LABEL
class DefaultDatabaseRouter(object):
def db_for_read(self, model, **hints):
"""
Reads go to a randomly-chosen replica.
"""
return "default"
def db_for_write(self, model, **hints):
"""
Writes always go to primary.
"""
return "default"
def allow_relation(self, obj1, obj2, **hints):
"""
Relations between objects are allowed
"""
return True
def allow_migrate(self, db, app_label, model=None, **hints):
"""
All default models end up in this pool.
"""
if app_label == SHADOW_MODEL_APP_LABEL:
return False
return True
|
Include ona database router file
|
Include ona database router file
|
Python
|
agpl-3.0
|
onaio/kpi,onaio/kpi,onaio/kpi,onaio/kpi
|
Include ona database router file
|
from __future__ import unicode_literals, absolute_import
from .constants import SHADOW_MODEL_APP_LABEL
class DefaultDatabaseRouter(object):
def db_for_read(self, model, **hints):
"""
Reads go to a randomly-chosen replica.
"""
return "default"
def db_for_write(self, model, **hints):
"""
Writes always go to primary.
"""
return "default"
def allow_relation(self, obj1, obj2, **hints):
"""
Relations between objects are allowed
"""
return True
def allow_migrate(self, db, app_label, model=None, **hints):
"""
All default models end up in this pool.
"""
if app_label == SHADOW_MODEL_APP_LABEL:
return False
return True
|
<commit_before><commit_msg>Include ona database router file<commit_after>
|
from __future__ import unicode_literals, absolute_import
from .constants import SHADOW_MODEL_APP_LABEL
class DefaultDatabaseRouter(object):
def db_for_read(self, model, **hints):
"""
Reads go to a randomly-chosen replica.
"""
return "default"
def db_for_write(self, model, **hints):
"""
Writes always go to primary.
"""
return "default"
def allow_relation(self, obj1, obj2, **hints):
"""
Relations between objects are allowed
"""
return True
def allow_migrate(self, db, app_label, model=None, **hints):
"""
All default models end up in this pool.
"""
if app_label == SHADOW_MODEL_APP_LABEL:
return False
return True
|
Include ona database router filefrom __future__ import unicode_literals, absolute_import
from .constants import SHADOW_MODEL_APP_LABEL
class DefaultDatabaseRouter(object):
def db_for_read(self, model, **hints):
"""
Reads go to a randomly-chosen replica.
"""
return "default"
def db_for_write(self, model, **hints):
"""
Writes always go to primary.
"""
return "default"
def allow_relation(self, obj1, obj2, **hints):
"""
Relations between objects are allowed
"""
return True
def allow_migrate(self, db, app_label, model=None, **hints):
"""
All default models end up in this pool.
"""
if app_label == SHADOW_MODEL_APP_LABEL:
return False
return True
|
<commit_before><commit_msg>Include ona database router file<commit_after>from __future__ import unicode_literals, absolute_import
from .constants import SHADOW_MODEL_APP_LABEL
class DefaultDatabaseRouter(object):
def db_for_read(self, model, **hints):
"""
Reads go to a randomly-chosen replica.
"""
return "default"
def db_for_write(self, model, **hints):
"""
Writes always go to primary.
"""
return "default"
def allow_relation(self, obj1, obj2, **hints):
"""
Relations between objects are allowed
"""
return True
def allow_migrate(self, db, app_label, model=None, **hints):
"""
All default models end up in this pool.
"""
if app_label == SHADOW_MODEL_APP_LABEL:
return False
return True
|
|
29e8cf3a1ecd3ce24a1d4473f7817da6df815c77
|
salad/terrains/browser.py
|
salad/terrains/browser.py
|
from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
world.master_browser = setup_browser(world.drivers[0], world.remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
|
from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
try:
browser = world.drivers[0]
remote_url = world.remote_url
    except (AttributeError, IndexError):
        # Fall back to a local firefox when no drivers or remote URL are configured.
browser = 'firefox'
remote_url = None
world.master_browser = setup_browser(browser, remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
|
Make sure executing lettuce still results in something sane
|
Make sure executing lettuce still results in something sane
|
Python
|
bsd-3-clause
|
salad/salad,adw0rd/salad-py3,beanqueen/salad,adw0rd/salad-py3,beanqueen/salad,salad/salad
|
from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
world.master_browser = setup_browser(world.drivers[0], world.remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
Make sure executing lettuce still results in something sane
|
from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
try:
browser = world.drivers[0]
remote_url = world.remote_url
    except (AttributeError, IndexError):
        # Fall back to a local firefox when no drivers or remote URL are configured.
browser = 'firefox'
remote_url = None
world.master_browser = setup_browser(browser, remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
|
<commit_before>from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
world.master_browser = setup_browser(world.drivers[0], world.remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
<commit_msg>Make sure executing lettuce still results in something sane<commit_after>
|
from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
try:
browser = world.drivers[0]
remote_url = world.remote_url
except (AttributeError, IndexError):
browser = 'firefox'
remote_url = None
world.master_browser = setup_browser(browser, remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
|
from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
world.master_browser = setup_browser(world.drivers[0], world.remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
Make sure executing lettuce still results in something sanefrom lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
try:
browser = world.drivers[0]
remote_url = world.remote_url
except (AttributeError, IndexError):
browser = 'firefox'
remote_url = None
world.master_browser = setup_browser(browser, remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
|
<commit_before>from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
world.master_browser = setup_browser(world.drivers[0], world.remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
<commit_msg>Make sure executing lettuce still results in something sane<commit_after>from lettuce import before, world, after
from splinter.browser import Browser
from salad.logger import logger
@before.all
def setup_master_browser():
try:
browser = world.drivers[0]
remote_url = world.remote_url
except (AttributeError, IndexError):
browser = 'firefox'
remote_url = None
world.master_browser = setup_browser(browser, remote_url)
world.browser = world.master_browser
def setup_browser(browser, url=None):
logger.info("Setting up browser %s..." % browser)
try:
if url:
browser = Browser('remote', url=url,
browser=browser)
else:
browser = Browser(browser)
except Exception as e:
logger.warn("Error starting up %s: %s" % (browser, e))
raise
return browser
@before.each_scenario
def clear_alternative_browsers(step):
world.browsers = []
@after.each_scenario
def reset_to_parent_frame(step):
if hasattr(world, "parent_browser"):
world.browser = world.parent_browser
@after.each_scenario
def restore_browser(step):
world.browser = world.master_browser
for browser in world.browsers:
teardown_browser(browser)
@after.all
def teardown_master_browser(total):
teardown_browser(world.master_browser)
def teardown_browser(browser):
name = browser.driver_name
logger.info("Tearing down browser %s..." % name)
try:
browser.quit()
except:
logger.warn("Error tearing down %s" % name)
|
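The fallback added here only matters when a project never configured `world` at all. As a minimal sketch (the attribute names come from the hook above; the file name follows the usual lettuce convention and is an assumption, not something this commit requires), a project's terrain.py could set both values up front so the firefox default is never used:

from lettuce import world

# Illustrative values. The first driver becomes the master browser;
# a remote_url of None means a local browser instead of a Selenium grid.
world.drivers = ['firefox', 'chrome']
world.remote_url = None  # or e.g. 'http://localhost:4444/wd/hub'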
d8cd90bfea137f27c356bb34687e4b847d2019f4
|
pylab/website/tests/test_comments.py
|
pylab/website/tests/test_comments.py
|
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_comments.models import Comment
from pylab.core.models import Project
class CommentsTests(TestCase):
def test_add_comment(self):
user = User.objects.create_user('user1')
project = Project.objects.create(
author=user,
title='Test project',
description='Description',
created='2015-08-13'
)
now = datetime.datetime.now()
comment = Comment.objects.create(
user_name='user2',
comment='test comment',
submit_date=now,
object_pk=project.id,
content_type_id=project.id,
site_id=1,
)
comment.save()
resp = self.client.post('/projects/test-project/', {'comment': comment})
self.assertEqual(resp.status_code, 200)
# self.assertContains(resp, 'test comment')
|
Add test for project comments
|
Add test for project comments
|
Python
|
agpl-3.0
|
python-dirbtuves/website,python-dirbtuves/website,python-dirbtuves/website
|
Add test for project comments
|
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_comments.models import Comment
from pylab.core.models import Project
class CommentsTests(TestCase):
def test_add_comment(self):
user = User.objects.create_user('user1')
project = Project.objects.create(
author=user,
title='Test project',
description='Description',
created='2015-08-13'
)
now = datetime.datetime.now()
comment = Comment.objects.create(
user_name='user2',
comment='test comment',
submit_date=now,
object_pk=project.id,
content_type_id=project.id,
site_id=1,
)
comment.save()
resp = self.client.post('/projects/test-project/', {'comment': comment})
self.assertEqual(resp.status_code, 200)
# self.assertContains(resp, 'test comment')
|
<commit_before><commit_msg>Add test for project comments<commit_after>
|
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_comments.models import Comment
from pylab.core.models import Project
class CommentsTests(TestCase):
def test_add_comment(self):
user = User.objects.create_user('user1')
project = Project.objects.create(
author=user,
title='Test project',
description='Description',
created='2015-08-13'
)
now = datetime.datetime.now()
comment = Comment.objects.create(
user_name='user2',
comment='test comment',
submit_date=now,
object_pk=project.id,
content_type_id=project.id,
site_id=1,
)
comment.save()
resp = self.client.post('/projects/test-project/', {'comment': comment})
self.assertEqual(resp.status_code, 200)
# self.assertContains(resp, 'test comment')
|
Add test for project commentsimport datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_comments.models import Comment
from pylab.core.models import Project
class CommentsTests(TestCase):
def test_add_comment(self):
user = User.objects.create_user('user1')
project = Project.objects.create(
author=user,
title='Test project',
description='Description',
created='2015-08-13'
)
now = datetime.datetime.now()
comment = Comment.objects.create(
user_name='user2',
comment='test comment',
submit_date=now,
object_pk=project.id,
content_type_id=project.id,
site_id=1,
)
comment.save()
resp = self.client.post('/projects/test-project/', {'comment': comment})
self.assertEqual(resp.status_code, 200)
# self.assertContains(resp, 'test comment')
|
<commit_before><commit_msg>Add test for project comments<commit_after>import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_comments.models import Comment
from pylab.core.models import Project
class CommentsTests(TestCase):
def test_add_comment(self):
user = User.objects.create_user('user1')
project = Project.objects.create(
author=user,
title='Test project',
description='Description',
created='2015-08-13'
)
now = datetime.datetime.now()
comment = Comment.objects.create(
user_name='user2',
comment='test comment',
submit_date=now,
object_pk=project.id,
content_type_id=project.id,
site_id=1,
)
comment.save()
resp = self.client.post('/projects/test-project/', {'comment': comment})
self.assertEqual(resp.status_code, 200)
# self.assertContains(resp, 'test comment')
|
|
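One detail in the test above is worth flagging: `content_type_id=project.id` only matches the real content type if the two ids happen to coincide. A sketch of the conventional lookup, assuming the standard `django.contrib.contenttypes` app that django_comments already depends on:

from django.contrib.contenttypes.models import ContentType

# Resolve the ContentType row for Project instead of reusing the object's pk.
project_ct = ContentType.objects.get_for_model(Project)
comment = Comment.objects.create(
    user_name='user2',
    comment='test comment',
    submit_date=now,
    object_pk=project.id,
    content_type_id=project_ct.id,
    site_id=1,
)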
e3838a3281184b4fe3428b55cb893ecfd213e845
|
zmq_io/out_pifacedigitalio.py
|
zmq_io/out_pifacedigitalio.py
|
import argparse
import json
import pifacedigitalio as pfdio
import zmq
PINS = (
0b00000001,
0b00000010,
0b00000100,
0b00001000,
0b00010000,
0b00100000,
0b01000000,
0b10000000
)
def parse_args():
"""
Specify and parse command line arguments.
"""
p = argparse.ArgumentParser()
p.add_argument("pub_uri")
p.add_argument("--prefix", default="INPUT")
return p.parse_args()
def set_up_pub_socket(uri):
"""
Create ZeroMQ PUB socket and bind it to the specified uri.
"""
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind(uri)
return socket
def input_changed(event):
"""
Handler for input changes. Forms a dictionary containing event information and PUBlishes it
using the global ZeroMQ PUB socket.
"""
data = {
"pin": event.pin_num,
"direction": not bool(event.direction),
"state": {i: bool(event.interrupt_capture & PINS[i]) for i, _ in enumerate(PINS)}
}
socket.send("%s%s" % (args.prefix, json.dumps(data)))
if __name__ == "__main__":
args = parse_args()
socket = set_up_pub_socket(args.pub_uri)
listener = pfdio.InputEventListener()
for i, _ in enumerate(PINS):
listener.register(i, pfdio.IODIR_BOTH, input_changed)
listener.activate()
|
Add outbound pifacedigitalio module which uses interrupt events instead of polling.
|
Add outbound pifacedigitalio module which uses interrupt events instead of polling.
|
Python
|
unlicense
|
flyte/zmq-io-modules,flyte/zmq-io-modules
|
Add outbound pifacedigitalio module which uses interrupt events instead of polling.
|
import argparse
import json
import pifacedigitalio as pfdio
import zmq
PINS = (
0b00000001,
0b00000010,
0b00000100,
0b00001000,
0b00010000,
0b00100000,
0b01000000,
0b10000000
)
def parse_args():
"""
Specify and parse command line arguments.
"""
p = argparse.ArgumentParser()
p.add_argument("pub_uri")
p.add_argument("--prefix", default="INPUT")
return p.parse_args()
def set_up_pub_socket(uri):
"""
Create ZeroMQ PUB socket and bind it to the specified uri.
"""
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind(uri)
return socket
def input_changed(event):
"""
Handler for input changes. Forms a dictionary containing event information and PUBlishes it
using the global ZeroMQ PUB socket.
"""
data = {
"pin": event.pin_num,
"direction": not bool(event.direction),
"state": {i: bool(event.interrupt_capture & PINS[i]) for i, _ in enumerate(PINS)}
}
socket.send("%s%s" % (args.prefix, json.dumps(data)))
if __name__ == "__main__":
args = parse_args()
socket = set_up_pub_socket(args.pub_uri)
listener = pfdio.InputEventListener()
for i, _ in enumerate(PINS):
listener.register(i, pfdio.IODIR_BOTH, input_changed)
listener.activate()
|
<commit_before><commit_msg>Add outbound pifacedigitalio module which uses interrupt events instead of polling.<commit_after>
|
import argparse
import json
import pifacedigitalio as pfdio
import zmq
PINS = (
0b00000001,
0b00000010,
0b00000100,
0b00001000,
0b00010000,
0b00100000,
0b01000000,
0b10000000
)
def parse_args():
"""
Specify and parse command line arguments.
"""
p = argparse.ArgumentParser()
p.add_argument("pub_uri")
p.add_argument("--prefix", default="INPUT")
return p.parse_args()
def set_up_pub_socket(uri):
"""
Create ZeroMQ PUB socket and bind it to the specified uri.
"""
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind(uri)
return socket
def input_changed(event):
"""
Handler for input changes. Forms a dictionary containing event information and PUBlishes it
using the global ZeroMQ PUB socket.
"""
data = {
"pin": event.pin_num,
"direction": not bool(event.direction),
"state": {i: bool(event.interrupt_capture & PINS[i]) for i, _ in enumerate(PINS)}
}
socket.send("%s%s" % (args.prefix, json.dumps(data)))
if __name__ == "__main__":
args = parse_args()
socket = set_up_pub_socket(args.pub_uri)
listener = pfdio.InputEventListener()
for i, _ in enumerate(PINS):
listener.register(i, pfdio.IODIR_BOTH, input_changed)
listener.activate()
|
Add outbound pifacedigitalio module which uses interrupt events instead of polling.import argparse
import json
import pifacedigitalio as pfdio
import zmq
PINS = (
0b00000001,
0b00000010,
0b00000100,
0b00001000,
0b00010000,
0b00100000,
0b01000000,
0b10000000
)
def parse_args():
"""
Specify and parse command line arguments.
"""
p = argparse.ArgumentParser()
p.add_argument("pub_uri")
p.add_argument("--prefix", default="INPUT")
return p.parse_args()
def set_up_pub_socket(uri):
"""
Create ZeroMQ PUB socket and bind it to the specified uri.
"""
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind(uri)
return socket
def input_changed(event):
"""
Handler for input changes. Forms a dictionary containing event information and PUBlishes it
using the global ZeroMQ PUB socket.
"""
data = {
"pin": event.pin_num,
"direction": not bool(event.direction),
"state": {i: bool(event.interrupt_capture & PINS[i]) for i, _ in enumerate(PINS)}
}
socket.send("%s%s" % (args.prefix, json.dumps(data)))
if __name__ == "__main__":
args = parse_args()
socket = set_up_pub_socket(args.pub_uri)
listener = pfdio.InputEventListener()
for i, _ in enumerate(PINS):
listener.register(i, pfdio.IODIR_BOTH, input_changed)
listener.activate()
|
<commit_before><commit_msg>Add outbound pifacedigitalio module which uses interrupt events instead of polling.<commit_after>import argparse
import json
import pifacedigitalio as pfdio
import zmq
PINS = (
0b00000001,
0b00000010,
0b00000100,
0b00001000,
0b00010000,
0b00100000,
0b01000000,
0b10000000
)
def parse_args():
"""
Specify and parse command line arguments.
"""
p = argparse.ArgumentParser()
p.add_argument("pub_uri")
p.add_argument("--prefix", default="INPUT")
return p.parse_args()
def set_up_pub_socket(uri):
"""
Create ZeroMQ PUB socket and bind it to the specified uri.
"""
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind(uri)
return socket
def input_changed(event):
"""
Handler for input changes. Forms a dictionary containing event information and PUBlishes it
using the global ZeroMQ PUB socket.
"""
data = {
"pin": event.pin_num,
"direction": not bool(event.direction),
"state": {i: bool(event.interrupt_capture & PINS[i]) for i, _ in enumerate(PINS)}
}
socket.send("%s%s" % (args.prefix, json.dumps(data)))
if __name__ == "__main__":
args = parse_args()
socket = set_up_pub_socket(args.pub_uri)
listener = pfdio.InputEventListener()
for i, _ in enumerate(PINS):
listener.register(i, pfdio.IODIR_BOTH, input_changed)
listener.activate()
|
|
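Since the module only PUBlishes, nothing is observable without a subscriber on the other end. A minimal SUB-side sketch (the tcp address is illustrative; 'INPUT' matches the publisher's --prefix default, and the JSON payload starts at the first '{' of each message):

import json
import zmq

context = zmq.Context()
sub = context.socket(zmq.SUB)
sub.connect("tcp://127.0.0.1:5555")  # must match the publisher's pub_uri
sub.setsockopt(zmq.SUBSCRIBE, "INPUT")

while True:
    message = sub.recv()
    # Drop the prefix; everything from the first '{' onwards is JSON.
    data = json.loads(message[message.index("{"):])
    print "pin %(pin)s direction %(direction)s" % data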
d2883e9c38d0b093c78b2145343b922fd2406cbb
|
samples/debugger_membp_singlestep.py
|
samples/debugger_membp_singlestep.py
|
import sys
import os.path
import pprint
sys.path.append(os.path.abspath(__file__ + "\..\.."))
import windows
import windows.test
import windows.debug
import windows.native_exec.simple_x86 as x86
from windows.generated_def.winstructs import *
class MyDebugger(windows.debug.Debugger):
def __init__(self, *args, **kwargs):
super(MyDebugger, self).__init__(*args, **kwargs)
self.single_step_counter = 0
def on_exception(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got exception {0} at 0x{1:x}".format(code, addr))
def on_single_step(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got single_step {0} at 0x{1:x}".format(code, addr))
self.single_step_counter -= 1
if self.single_step_counter > 0:
return self.single_step()
class SingleStepOnWrite(windows.debug.MemoryBreakpoint):
DEFAULT_PROTECT = PAGE_READONLY
"""Check that BP/dbg can trigger single step and that instruction follows"""
def trigger(self, dbg, exc):
fault_addr = exc.ExceptionRecord.ExceptionInformation[1]
eip = dbg.current_thread.context.pc
print("Instruction at <{0:#x}> wrote at <{1:#x}>".format(eip, fault_addr))
dbg.single_step_counter = 4
return dbg.single_step()
calc = windows.test.pop_calc_32(dwCreationFlags=DEBUG_PROCESS)
d = MyDebugger(calc)
code = calc.virtual_alloc(0x1000)
data = calc.virtual_alloc(0x1000)
injected = x86.MultipleInstr()
injected += x86.Mov("EAX", 0)
injected += x86.Mov(x86.deref(data), "EAX")
injected += x86.Add("EAX", 4)
injected += x86.Mov(x86.deref(data + 4), "EAX")
injected += x86.Nop()
injected += x86.Nop()
injected += x86.Ret()
calc.write_memory(code, injected.get_code())
d.add_bp(SingleStepOnWrite(data, size=0x1000))
calc.create_thread(code, 0)
d.loop()
|
Add a sample on memoryBP + singlestep (cc Heurs :D)
|
Add a sample on memoryBP + singlestep (cc Heurs :D)
|
Python
|
bsd-3-clause
|
hakril/PythonForWindows
|
Add a sample on memoryBP + singlestep (cc Heurs :D)
|
import sys
import os.path
import pprint
sys.path.append(os.path.abspath(__file__ + "\..\.."))
import windows
import windows.test
import windows.debug
import windows.native_exec.simple_x86 as x86
from windows.generated_def.winstructs import *
class MyDebugger(windows.debug.Debugger):
def __init__(self, *args, **kwargs):
super(MyDebugger, self).__init__(*args, **kwargs)
self.single_step_counter = 0
def on_exception(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got exception {0} at 0x{1:x}".format(code, addr))
def on_single_step(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got single_step {0} at 0x{1:x}".format(code, addr))
self.single_step_counter -= 1
if self.single_step_counter > 0:
return self.single_step()
class SingleStepOnWrite(windows.debug.MemoryBreakpoint):
DEFAULT_PROTECT = PAGE_READONLY
"""Check that BP/dbg can trigger single step and that instruction follows"""
def trigger(self, dbg, exc):
fault_addr = exc.ExceptionRecord.ExceptionInformation[1]
eip = dbg.current_thread.context.pc
print("Instruction at <{0:#x}> wrote at <{1:#x}>".format(eip, fault_addr))
dbg.single_step_counter = 4
return dbg.single_step()
calc = windows.test.pop_calc_32(dwCreationFlags=DEBUG_PROCESS)
d = MyDebugger(calc)
code = calc.virtual_alloc(0x1000)
data = calc.virtual_alloc(0x1000)
injected = x86.MultipleInstr()
injected += x86.Mov("EAX", 0)
injected += x86.Mov(x86.deref(data), "EAX")
injected += x86.Add("EAX", 4)
injected += x86.Mov(x86.deref(data + 4), "EAX")
injected += x86.Nop()
injected += x86.Nop()
injected += x86.Ret()
calc.write_memory(code, injected.get_code())
d.add_bp(SingleStepOnWrite(data, size=0x1000))
calc.create_thread(code, 0)
d.loop()
|
<commit_before><commit_msg>Add a sample on memoryBP + singlestep (cc Heurs :D)<commit_after>
|
import sys
import os.path
import pprint
sys.path.append(os.path.abspath(__file__ + "\..\.."))
import windows
import windows.test
import windows.debug
import windows.native_exec.simple_x86 as x86
from windows.generated_def.winstructs import *
class MyDebugger(windows.debug.Debugger):
def __init__(self, *args, **kwargs):
super(MyDebugger, self).__init__(*args, **kwargs)
self.single_step_counter = 0
def on_exception(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got exception {0} at 0x{1:x}".format(code, addr))
def on_single_step(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got single_step {0} at 0x{1:x}".format(code, addr))
self.single_step_counter -= 1
if self.single_step_counter > 0:
return self.single_step()
class SingleStepOnWrite(windows.debug.MemoryBreakpoint):
DEFAULT_PROTECT = PAGE_READONLY
"""Check that BP/dbg can trigger single step and that instruction follows"""
def trigger(self, dbg, exc):
fault_addr = exc.ExceptionRecord.ExceptionInformation[1]
eip = dbg.current_thread.context.pc
print("Instruction at <{0:#x}> wrote at <{1:#x}>".format(eip, fault_addr))
dbg.single_step_counter = 4
return dbg.single_step()
calc = windows.test.pop_calc_32(dwCreationFlags=DEBUG_PROCESS)
d = MyDebugger(calc)
code = calc.virtual_alloc(0x1000)
data = calc.virtual_alloc(0x1000)
injected = x86.MultipleInstr()
injected += x86.Mov("EAX", 0)
injected += x86.Mov(x86.deref(data), "EAX")
injected += x86.Add("EAX", 4)
injected += x86.Mov(x86.deref(data + 4), "EAX")
injected += x86.Nop()
injected += x86.Nop()
injected += x86.Ret()
calc.write_memory(code, injected.get_code())
d.add_bp(SingleStepOnWrite(data, size=0x1000))
calc.create_thread(code, 0)
d.loop()
|
Add a sample on memoryBP + singlestep (cc Heurs :D)import sys
import os.path
import pprint
sys.path.append(os.path.abspath(__file__ + "\..\.."))
import windows
import windows.test
import windows.debug
import windows.native_exec.simple_x86 as x86
from windows.generated_def.winstructs import *
class MyDebugger(windows.debug.Debugger):
def __init__(self, *args, **kwargs):
super(MyDebugger, self).__init__(*args, **kwargs)
self.single_step_counter = 0
def on_exception(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got exception {0} at 0x{1:x}".format(code, addr))
def on_single_step(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got single_step {0} at 0x{1:x}".format(code, addr))
self.single_step_counter -= 1
if self.single_step_counter > 0:
return self.single_step()
class SingleStepOnWrite(windows.debug.MemoryBreakpoint):
DEFAULT_PROTECT = PAGE_READONLY
"""Check that BP/dbg can trigger single step and that instruction follows"""
def trigger(self, dbg, exc):
fault_addr = exc.ExceptionRecord.ExceptionInformation[1]
eip = dbg.current_thread.context.pc
print("Instruction at <{0:#x}> wrote at <{1:#x}>".format(eip, fault_addr))
dbg.single_step_counter = 4
return dbg.single_step()
calc = windows.test.pop_calc_32(dwCreationFlags=DEBUG_PROCESS)
d = MyDebugger(calc)
code = calc.virtual_alloc(0x1000)
data = calc.virtual_alloc(0x1000)
injected = x86.MultipleInstr()
injected += x86.Mov("EAX", 0)
injected += x86.Mov(x86.deref(data), "EAX")
injected += x86.Add("EAX", 4)
injected += x86.Mov(x86.deref(data + 4), "EAX")
injected += x86.Nop()
injected += x86.Nop()
injected += x86.Ret()
calc.write_memory(code, injected.get_code())
d.add_bp(SingleStepOnWrite(data, size=0x1000))
calc.create_thread(code, 0)
d.loop()
|
<commit_before><commit_msg>Add a sample on memoryBP + singlestep (cc Heurs :D)<commit_after>import sys
import os.path
import pprint
sys.path.append(os.path.abspath(__file__ + "\..\.."))
import windows
import windows.test
import windows.debug
import windows.native_exec.simple_x86 as x86
from windows.generated_def.winstructs import *
class MyDebugger(windows.debug.Debugger):
def __init__(self, *args, **kwargs):
super(MyDebugger, self).__init__(*args, **kwargs)
self.single_step_counter = 0
def on_exception(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got exception {0} at 0x{1:x}".format(code, addr))
def on_single_step(self, exception):
code = exception.ExceptionRecord.ExceptionCode
addr = exception.ExceptionRecord.ExceptionAddress
print("Got single_step {0} at 0x{1:x}".format(code, addr))
self.single_step_counter -= 1
if self.single_step_counter > 0:
return self.single_step()
class SingleStepOnWrite(windows.debug.MemoryBreakpoint):
DEFAULT_PROTECT = PAGE_READONLY
"""Check that BP/dbg can trigger single step and that instruction follows"""
def trigger(self, dbg, exc):
fault_addr = exc.ExceptionRecord.ExceptionInformation[1]
eip = dbg.current_thread.context.pc
print("Instruction at <{0:#x}> wrote at <{1:#x}>".format(eip, fault_addr))
dbg.single_step_counter = 4
return dbg.single_step()
calc = windows.test.pop_calc_32(dwCreationFlags=DEBUG_PROCESS)
d = MyDebugger(calc)
code = calc.virtual_alloc(0x1000)
data = calc.virtual_alloc(0x1000)
injected = x86.MultipleInstr()
injected += x86.Mov("EAX", 0)
injected += x86.Mov(x86.deref(data), "EAX")
injected += x86.Add("EAX", 4)
injected += x86.Mov(x86.deref(data + 4), "EAX")
injected += x86.Nop()
injected += x86.Nop()
injected += x86.Ret()
calc.write_memory(code, injected.get_code())
d.add_bp(SingleStepOnWrite(data, size=0x1000))
calc.create_thread(code, 0)
d.loop()
|
|
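The write-only behaviour of SingleStepOnWrite comes entirely from DEFAULT_PROTECT = PAGE_READONLY. As a sketch of how the hook generalises (assuming the same windows.debug.MemoryBreakpoint interface the sample uses, with PAGE_NOACCESS taken from the generated Windows definitions), a breakpoint firing on any access could look like:

class BreakOnAnyAccess(windows.debug.MemoryBreakpoint):
    """Fault on read, write and execute; noisier, but catches everything."""
    DEFAULT_PROTECT = PAGE_NOACCESS

    def trigger(self, dbg, exc):
        fault_addr = exc.ExceptionRecord.ExceptionInformation[1]
        print("Access at <{0:#x}>".format(fault_addr))
        # Returning without requesting a single step simply resumes execution.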
f08aa45603056c4fa8fca398a65d7bd751a7e4ed
|
src/Scripts/cachelines.py
|
src/Scripts/cachelines.py
|
# Takes a Document Frequency Table and calculates the expected number of cache
# misses for each term.
#
# TODO: allow use of different TermTreatments.
# TODO: this assumes we subtract 1 from 'rows' because the DocumentActive row is counted as a row. Check that assumption!
from collections import defaultdict
import csv
import math
frequency = defaultdict(float)
with open("/tmp/wikipedia/config/DocFreqTable-0.csv") as f:
reader = csv.reader(f)
header = next(reader)
assert header == ['hash','gramSize', 'streamId', 'frequency', 'text']
for dft_row in reader:
frequency[dft_row[4]] = float(dft_row[3])
cachelines_per_row = 60 # TODO: don't hardcode this.
density = 0.1 # TODO: don't hardcode this.
with open("/tmp/QueryPipelineStatistics.csv") as f:
reader = csv.reader(f)
qp_header = next(reader)
assert qp_header == ['query',
'rows',
'matches',
'quadwords',
'cachelines',
'parse',
'plan',
'match']
outf = open("/tmp/Memory.csv", 'w', newline='')
writer = csv.writer(outf)
writer.writerow(['Query',
'TermPos',
'Quadwords',
'Cachelines',
'ExpectedCachelines'])
pos = 0
for qp_row in reader:
term = qp_row[0]
s = frequency[term]
num_rows = int(qp_row[1]) - 1 # TODO: should we subtract 1?
expected_misses = 0
if num_rows == 1:
# private row.
expected_misses = cachelines_per_row
else:
# TODO: consider accounting for actual row density.
for i in range(1, num_rows + 1):
p = 1 - math.pow((1 - math.pow(density - s, i)) * (1 - s), 512)
expected_misses += p * cachelines_per_row
writer.writerow([term,
pos,
qp_row[3], # quadwords
qp_row[4], # cachelines
expected_misses])
pos += 1
|
Add script for cache miss estimation.
|
Add script for cache miss estimation.
|
Python
|
mit
|
danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,danluu/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel
|
Add script for cache miss estimation.
|
# Takes a Document Frequency Table and calculates the expected number of cache
# misses for each term.
#
# TODO: allow use of different TermTreatments.
# TODO: this assumes we subtract 1 from 'rows' because the DocumentActive row is counted as a row. Check that assumption!
from collections import defaultdict
import csv
import math
frequency = defaultdict(float)
with open("/tmp/wikipedia/config/DocFreqTable-0.csv") as f:
reader = csv.reader(f)
header = next(reader)
assert header == ['hash','gramSize', 'streamId', 'frequency', 'text']
for dft_row in reader:
frequency[dft_row[4]] = float(dft_row[3])
cachelines_per_row = 60 # TODO: don't hardcode this.
density = 0.1 # TODO: don't hardcode this.
with open("/tmp/QueryPipelineStatistics.csv") as f:
reader = csv.reader(f)
qp_header = next(reader)
assert qp_header == ['query',
'rows',
'matches',
'quadwords',
'cachelines',
'parse',
'plan',
'match']
outf = open("/tmp/Memory.csv", 'w', newline='')
writer = csv.writer(outf)
writer.writerow(['Query',
'TermPos',
'Quadwords',
'Cachelines',
'ExpectedCachelines'])
pos = 0
for qp_row in reader:
term = qp_row[0]
s = frequency[term]
num_rows = int(qp_row[1]) - 1 # TODO: should we subtract 1?
expected_misses = 0
if num_rows == 1:
# private row.
expected_misses = cachelines_per_row
else:
# TODO: consider accounting for actual row density.
for i in range(1, num_rows + 1):
p = 1 - math.pow((1 - math.pow(density - s, i)) * (1 - s), 512)
expected_misses += p * cachelines_per_row
writer.writerow([term,
pos,
qp_row[3], # quadwords
qp_row[4], # cachelines
expected_misses])
pos += 1
|
<commit_before><commit_msg>Add script for cache miss estimation.<commit_after>
|
# Takes a Document Frequency Table and calculates the expected number of cache
# misses for each term.
#
# TODO: allow use of different TermTreatments.
# TODO: this assumes we subtract 1 from 'rows' because the DocumentActive row is counted as a row. Check that assumption!
from collections import defaultdict
import csv
import math
frequency = defaultdict(float)
with open("/tmp/wikipedia/config/DocFreqTable-0.csv") as f:
reader = csv.reader(f)
header = next(reader)
assert header == ['hash','gramSize', 'streamId', 'frequency', 'text']
for dft_row in reader:
frequency[dft_row[4]] = float(dft_row[3])
cachelines_per_row = 60 # TODO: don't hardcode this.
density = 0.1 # TODO: don't hardcode this.
with open("/tmp/QueryPipelineStatistics.csv") as f:
reader = csv.reader(f)
qp_header = next(reader)
assert qp_header == ['query',
'rows',
'matches',
'quadwords',
'cachelines',
'parse',
'plan',
'match']
outf = open("/tmp/Memory.csv", 'w', newline='')
writer = csv.writer(outf)
writer.writerow(['Query',
'TermPos',
'Quadwords',
'Cachelines',
'ExpectedCachelines'])
pos = 0
for qp_row in reader:
term = qp_row[0]
s = frequency[term]
num_rows = int(qp_row[1]) - 1 # TODO: should we subtract 1?
expected_misses = 0
if num_rows == 1:
# private row.
expected_misses = cachelines_per_row
else:
# TODO: consider accounting for actual row density.
for i in range(1, num_rows + 1):
p = 1 - math.pow((1 - math.pow(density - s, i)) * (1 - s), 512)
expected_misses += p * cachelines_per_row
writer.writerow([term,
pos,
qp_row[3], # quadwords
qp_row[4], # cachelines
expected_misses])
pos += 1
|
Add script for cache miss estimation.# Takes a Document Frequency Table and calculates the expected number of cache
# misses for each term.
#
# TODO: allow use of different TermTreatments.
# TODO: this assumes we subtract 1 from 'rows' because the DocumentActive row is counted as a row. Check that assumption!
from collections import defaultdict
import csv
import math
frequency = defaultdict(float)
with open("/tmp/wikipedia/config/DocFreqTable-0.csv") as f:
reader = csv.reader(f)
header = next(reader)
assert header == ['hash','gramSize', 'streamId', 'frequency', 'text']
for dft_row in reader:
frequency[dft_row[4]] = float(dft_row[3])
cachelines_per_row = 60 # TODO: don't hardcode this.
density = 0.1 # TODO: don't hardcode this.
with open("/tmp/QueryPipelineStatistics.csv") as f:
reader = csv.reader(f)
qp_header = next(reader)
assert qp_header == ['query',
'rows',
'matches',
'quadwords',
'cachelines',
'parse',
'plan',
'match']
outf = open("/tmp/Memory.csv", 'w', newline='')
writer = csv.writer(outf)
writer.writerow(['Query',
'TermPos',
'Quadwords',
'Cachelines',
'ExpectedCachelines'])
pos = 0
for qp_row in reader:
term = qp_row[0]
s = frequency[term]
num_rows = int(qp_row[1]) - 1 # TODO: should we subtract 1?
expected_misses = 0
if num_rows == 1:
# private row.
expected_misses = cachelines_per_row
else:
# TODO: consider accounting for actual row density.
for i in range(1, num_rows + 1):
p = 1 - math.pow((1 - math.pow(density - s, i)) * (1 - s), 512)
expected_misses += p * cachelines_per_row
writer.writerow([term,
pos,
qp_row[3], # quadwords
qp_row[4], # cachelines
expected_misses])
pos += 1
|
<commit_before><commit_msg>Add script for cache miss estimation.<commit_after># Takes a Document Frequency Table and calculates the expected number of cache
# misses for each term.
#
# TODO: allow use of different TermTreatments.
# TODO: this assumes we subtract 1 from 'rows' because the DocumentActive row is counted as a row. Check that assumption!
from collections import defaultdict
import csv
import math
frequency = defaultdict(float)
with open("/tmp/wikipedia/config/DocFreqTable-0.csv") as f:
reader = csv.reader(f)
header = next(reader)
assert header == ['hash','gramSize', 'streamId', 'frequency', 'text']
for dft_row in reader:
frequency[dft_row[4]] = float(dft_row[3])
cachelines_per_row = 60 # TODO: don't hardcode this.
density = 0.1 # TODO: don't hardcode this.
with open("/tmp/QueryPipelineStatistics.csv") as f:
reader = csv.reader(f)
qp_header = next(reader)
assert qp_header == ['query',
'rows',
'matches',
'quadwords',
'cachelines',
'parse',
'plan',
'match']
outf = open("/tmp/Memory.csv", 'w', newline='')
writer = csv.writer(outf)
writer.writerow(['Query',
'TermPos',
'Quadwords',
'Cachelines',
'ExpectedCachelines'])
pos = 0
for qp_row in reader:
term = qp_row[0]
s = frequency[term]
num_rows = int(qp_row[1]) - 1 # TODO: should we subtract 1?
expected_misses = 0
if num_rows == 1:
# private row.
expected_misses = cachelines_per_row
else:
# TODO: consider accounting for actual row density.
for i in range(1, num_rows + 1):
p = 1 - math.pow((1 - math.pow(density - s, i)) * (1 - s), 512)
expected_misses += p * cachelines_per_row
writer.writerow([term,
pos,
qp_row[3], # quadwords
qp_row[4], # cachelines
expected_misses])
pos += 1
|
|
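The probability inside the loop is easier to audit written out. With density d, term signal frequency s, and the hard-coded exponent 512 from the math.pow call, the script's per-row miss probability and the total it accumulates are

p_i = 1 - \bigl[\bigl(1 - (d - s)^{i}\bigr)\,(1 - s)\bigr]^{512},
\qquad
E[\mathrm{misses}] = \sum_{i=1}^{R} p_i \cdot \mathrm{cachelines\_per\_row},

where R is the row count after the subtraction the TODO questions; whether (d - s)^i is the intended noise term is exactly what the hard-coded density comment leaves open.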
b20be113b5aa491025dd6f2b4ea0a6d0cd1c7343
|
ynr/apps/parties/migrations/0011_add_initial_candidates_counts.py
|
ynr/apps/parties/migrations/0011_add_initial_candidates_counts.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-22 11:39
from __future__ import unicode_literals
from django.db import migrations
def populate_initial_candidate_counts(apps, schema_editor):
Party = apps.get_model("parties", "Party")
parties_qs = Party.objects.all()
for party in parties_qs:
party.total_candidates = party.membership_set.count()
party.current_candidates = party.membership_set.filter(
post_election__election__current=True
).count()
party.save()
class Migration(migrations.Migration):
dependencies = [("parties", "0010_add_candidate_totals")]
operations = [
migrations.RunPython(
populate_initial_candidate_counts, migrations.RunPython.noop
)
]
|
Add initial candidates counts in migration
|
Add initial candidates counts in migration
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
Add initial candidates counts in migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-22 11:39
from __future__ import unicode_literals
from django.db import migrations
def populate_initial_candidate_counts(apps, schema_editor):
Party = apps.get_model("parties", "Party")
parties_qs = Party.objects.all()
for party in parties_qs:
party.total_candidates = party.membership_set.count()
party.current_candidates = party.membership_set.filter(
post_election__election__current=True
).count()
party.save()
class Migration(migrations.Migration):
dependencies = [("parties", "0010_add_candidate_totals")]
operations = [
migrations.RunPython(
populate_initial_candidate_counts, migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Add initial candidates counts in migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-22 11:39
from __future__ import unicode_literals
from django.db import migrations
def populate_initial_candidate_counts(apps, schema_editor):
Party = apps.get_model("parties", "Party")
parties_qs = Party.objects.all()
for party in parties_qs:
party.total_candidates = party.membership_set.count()
party.current_candidates = party.membership_set.filter(
post_election__election__current=True
).count()
party.save()
class Migration(migrations.Migration):
dependencies = [("parties", "0010_add_candidate_totals")]
operations = [
migrations.RunPython(
populate_initial_candidate_counts, migrations.RunPython.noop
)
]
|
Add initial candidates counts in migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-22 11:39
from __future__ import unicode_literals
from django.db import migrations
def populate_initial_candidate_counts(apps, schema_editor):
Party = apps.get_model("parties", "Party")
parties_qs = Party.objects.all()
for party in parties_qs:
party.total_candidates = party.membership_set.count()
party.current_candidates = party.membership_set.filter(
post_election__election__current=True
).count()
party.save()
class Migration(migrations.Migration):
dependencies = [("parties", "0010_add_candidate_totals")]
operations = [
migrations.RunPython(
populate_initial_candidate_counts, migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Add initial candidates counts in migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-22 11:39
from __future__ import unicode_literals
from django.db import migrations
def populate_initial_candidate_counts(apps, schema_editor):
Party = apps.get_model("parties", "Party")
parties_qs = Party.objects.all()
for party in parties_qs:
party.total_candidates = party.membership_set.count()
party.current_candidates = party.membership_set.filter(
post_election__election__current=True
).count()
party.save()
class Migration(migrations.Migration):
dependencies = [("parties", "0010_add_candidate_totals")]
operations = [
migrations.RunPython(
populate_initial_candidate_counts, migrations.RunPython.noop
)
]
|
|
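The per-party loop issues two COUNT queries plus an UPDATE per row, which is fine for a one-off migration but slow on large tables. A bulk sketch (same models; the `membership` reverse name is inferred from `party.membership_set` above, and the Case/When conditional count is the standard idiom for Django 1.11, which this file was generated under):

from django.db.models import Case, Count, IntegerField, When

def populate_initial_candidate_counts(apps, schema_editor):
    Party = apps.get_model("parties", "Party")
    annotated = Party.objects.annotate(
        total=Count("membership"),
        current=Count(Case(
            # Count only memberships tied to a current election.
            When(membership__post_election__election__current=True, then=1),
            output_field=IntegerField(),
        )),
    )
    for party in annotated:
        party.total_candidates = party.total
        party.current_candidates = party.current
        party.save()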
277fd92e8f3a695af1af9ec795a3dd63e21566d9
|
test/unittests/audio/test_vlc_backend.py
|
test/unittests/audio/test_vlc_backend.py
|
import unittest
import unittest.mock as mock
import mycroft.audio.services.vlc as vlc
config = {
'backends': {
'test_simple': {
'type': 'vlc',
'active': True
}
}
}
@mock.patch('mycroft.audio.services.vlc.vlc')
class TestVlcBackend(unittest.TestCase):
def test_load_service(self, mock_vlc_mod):
bus = mock.Mock()
self.assertEqual(len(vlc.load_service(config, bus)), 1)
def test_playlist_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
self.assertTrue(isinstance(service.supported_uris(), list))
# Check that the tracks are added to the track_list
service.add_list(['a.mp3', 'b.ogg', ['c.wav', 'audio/wav']])
service.track_list.add_media.has_calls(['a.mp3', 'b.ogg', 'c.wav'])
# Check that clearing replaces the playlist with an empty one
empty_list = mock.Mock(name='EmptyList')
service.instance.media_list_new.return_value = empty_list
service.clear_list()
self.assertTrue(service.track_list is empty_list)
service.list_player.set_media_list.assert_called_with(empty_list)
def test_playback_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
loop_mode = mock.Mock(name='Loop')
normal_mode = mock.Mock(name='Normal')
mock_vlc_mod.PlaybackMode.loop = loop_mode
mock_vlc_mod.PlaybackMode.default = normal_mode
# Check normal play
service.play(repeat=False)
service.list_player.set_playback_mode.assert_called_with(normal_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check repeat
service.play(repeat=True)
service.list_player.set_playback_mode.assert_called_with(loop_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check pause
service.pause()
service.player.set_pause.assert_called_with(1)
service.player.set_pause.reset_mock()
# Check resume
service.resume()
service.player.set_pause.assert_called_with(0)
service.player.set_pause.reset_mock()
# Check stop
service.player.is_playing.return_value = False
self.assertFalse(service.stop())
service.player.is_playing.return_value = True
self.assertTrue(service.stop())
|
Add tests for vlc audio backend
|
Add tests for vlc audio backend
|
Python
|
apache-2.0
|
forslund/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core,MycroftAI/mycroft-core
|
Add tests for vlc audio backend
|
import unittest
import unittest.mock as mock
import mycroft.audio.services.vlc as vlc
config = {
'backends': {
'test_simple': {
'type': 'vlc',
'active': True
}
}
}
@mock.patch('mycroft.audio.services.vlc.vlc')
class TestVlcBackend(unittest.TestCase):
def test_load_service(self, mock_vlc_mod):
bus = mock.Mock()
self.assertEqual(len(vlc.load_service(config, bus)), 1)
def test_playlist_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
self.assertTrue(isinstance(service.supported_uris(), list))
# Check that the tracks are added to the track_list
service.add_list(['a.mp3', 'b.ogg', ['c.wav', 'audio/wav']])
service.track_list.add_media.has_calls(['a.mp3', 'b.ogg', 'c.wav'])
# Check that clearing replaces the playlist with an empty one
empty_list = mock.Mock(name='EmptyList')
service.instance.media_list_new.return_value = empty_list
service.clear_list()
self.assertTrue(service.track_list is empty_list)
service.list_player.set_media_list.assert_called_with(empty_list)
def test_playback_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
loop_mode = mock.Mock(name='Loop')
normal_mode = mock.Mock(name='Normal')
mock_vlc_mod.PlaybackMode.loop = loop_mode
mock_vlc_mod.PlaybackMode.default = normal_mode
# Check normal play
service.play(repeat=False)
service.list_player.set_playback_mode.assert_called_with(normal_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check repeat
service.play(repeat=True)
service.list_player.set_playback_mode.assert_called_with(loop_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check pause
service.pause()
service.player.set_pause.assert_called_with(1)
service.player.set_pause.reset_mock()
# Check resume
service.resume()
service.player.set_pause.assert_called_with(0)
service.player.set_pause.reset_mock()
# Check stop
service.player.is_playing.return_value = False
self.assertFalse(service.stop())
service.player.is_playing.return_value = True
self.assertTrue(service.stop())
|
<commit_before><commit_msg>Add tests for vlc audio backend<commit_after>
|
import unittest
import unittest.mock as mock
import mycroft.audio.services.vlc as vlc
config = {
'backends': {
'test_simple': {
'type': 'vlc',
'active': True
}
}
}
@mock.patch('mycroft.audio.services.vlc.vlc')
class TestVlcBackend(unittest.TestCase):
def test_load_service(self, mock_vlc_mod):
bus = mock.Mock()
self.assertEqual(len(vlc.load_service(config, bus)), 1)
def test_playlist_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
self.assertTrue(isinstance(service.supported_uris(), list))
# Check that the tracks are added to the track_list
service.add_list(['a.mp3', 'b.ogg', ['c.wav', 'audio/wav']])
service.track_list.add_media.has_calls(['a.mp3', 'b.ogg', 'c.wav'])
# Check that clearing replaces the playlist with an empty one
empty_list = mock.Mock(name='EmptyList')
service.instance.media_list_new.return_value = empty_list
service.clear_list()
self.assertTrue(service.track_list is empty_list)
service.list_player.set_media_list.assert_called_with(empty_list)
def test_playback_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
loop_mode = mock.Mock(name='Loop')
normal_mode = mock.Mock(name='Normal')
mock_vlc_mod.PlaybackMode.loop = loop_mode
mock_vlc_mod.PlaybackMode.default = normal_mode
# Check normal play
service.play(repeat=False)
service.list_player.set_playback_mode.assert_called_with(normal_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check repeat
service.play(repeat=True)
service.list_player.set_playback_mode.assert_called_with(loop_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check pause
service.pause()
service.player.set_pause.assert_called_with(1)
service.player.set_pause.reset_mock()
# Check resume
service.resume()
service.player.set_pause.assert_called_with(0)
service.player.set_pause.reset_mock()
# Check stop
service.player.is_playing.return_value = False
self.assertFalse(service.stop())
service.player.is_playing.return_value = True
self.assertTrue(service.stop())
|
Add tests for vlc audio backendimport unittest
import unittest.mock as mock
import mycroft.audio.services.vlc as vlc
config = {
'backends': {
'test_simple': {
'type': 'vlc',
'active': True
}
}
}
@mock.patch('mycroft.audio.services.vlc.vlc')
class TestVlcBackend(unittest.TestCase):
def test_load_service(self, mock_vlc_mod):
bus = mock.Mock()
self.assertEqual(len(vlc.load_service(config, bus)), 1)
def test_playlist_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
self.assertTrue(isinstance(service.supported_uris(), list))
# Check that the tracks are added to the track_list
service.add_list(['a.mp3', 'b.ogg', ['c.wav', 'audio/wav']])
service.track_list.add_media.has_calls(['a.mp3', 'b.ogg', 'c.wav'])
# Check that clearing replaces the playlist with an empty one
empty_list = mock.Mock(name='EmptyList')
service.instance.media_list_new.return_value = empty_list
service.clear_list()
self.assertTrue(service.track_list is empty_list)
service.list_player.set_media_list.assert_called_with(empty_list)
def test_playback_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
loop_mode = mock.Mock(name='Loop')
normal_mode = mock.Mock(name='Normal')
mock_vlc_mod.PlaybackMode.loop = loop_mode
mock_vlc_mod.PlaybackMode.default = normal_mode
# Check normal play
service.play(repeat=False)
service.list_player.set_playback_mode.assert_called_with(normal_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check repeat
service.play(repeat=True)
service.list_player.set_playback_mode.assert_called_with(loop_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check pause
service.pause()
service.player.set_pause.assert_called_with(1)
service.player.set_pause.reset_mock()
# Check resume
service.resume()
service.player.set_pause.assert_called_with(0)
service.player.set_pause.reset_mock()
# Check stop
service.player.is_playing.return_value = False
self.assertFalse(service.stop())
service.player.is_playing.return_value = True
self.assertTrue(service.stop())
|
<commit_before><commit_msg>Add tests for vlc audio backend<commit_after>import unittest
import unittest.mock as mock
import mycroft.audio.services.vlc as vlc
config = {
'backends': {
'test_simple': {
'type': 'vlc',
'active': True
}
}
}
@mock.patch('mycroft.audio.services.vlc.vlc')
class TestVlcBackend(unittest.TestCase):
def test_load_service(self, mock_vlc_mod):
bus = mock.Mock()
self.assertEqual(len(vlc.load_service(config, bus)), 1)
def test_playlist_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
self.assertTrue(isinstance(service.supported_uris(), list))
# Check that the tracks are added to the track_list
service.add_list(['a.mp3', 'b.ogg', ['c.wav', 'audio/wav']])
service.track_list.add_media.has_calls(['a.mp3', 'b.ogg', 'c.wav'])
# Check that clearing replaces the playlist with an empty one
empty_list = mock.Mock(name='EmptyList')
service.instance.media_list_new.return_value = empty_list
service.clear_list()
self.assertTrue(service.track_list is empty_list)
service.list_player.set_media_list.assert_called_with(empty_list)
def test_playback_methods(self, mock_vlc_mod):
bus = mock.Mock()
service = vlc.VlcService(config, bus)
loop_mode = mock.Mock(name='Loop')
normal_mode = mock.Mock(name='Normal')
mock_vlc_mod.PlaybackMode.loop = loop_mode
mock_vlc_mod.PlaybackMode.default = normal_mode
# Check normal play
service.play(repeat=False)
service.list_player.set_playback_mode.assert_called_with(normal_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check repeat
service.play(repeat=True)
service.list_player.set_playback_mode.assert_called_with(loop_mode)
service.list_player.set_playback_mode.reset_mock()
self.assertTrue(service.list_player.play.called)
service.list_player.play.reset_mock()
# Check pause
service.pause()
service.player.set_pause.assert_called_with(1)
service.player.set_pause.reset_mock()
# Check resume
service.resume()
service.player.set_pause.assert_called_with(0)
service.player.set_pause.reset_mock()
# Check stop
service.player.is_playing.return_value = False
self.assertFalse(service.stop())
service.player.is_playing.return_value = True
self.assertTrue(service.stop())
|
|
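The class-level @mock.patch is what keeps these tests runnable on machines with no libvlc: the decorator wraps every test method and hands it the replacement module object. A self-contained illustration of the mechanism, using os.getcwd as a stand-in patch target:

import os
import unittest
import unittest.mock as mock

@mock.patch('os.getcwd')
class PatchDemo(unittest.TestCase):
    def test_receives_mock(self, mock_getcwd):
        # Each test_* method receives the active mock as its last argument.
        mock_getcwd.return_value = '/fake'
        self.assertEqual(os.getcwd(), '/fake')

if __name__ == '__main__':
    unittest.main()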
6a67e280fd690dccf4ff5c22460dd0d52b9ec88d
|
napper_kittydar.py
|
napper_kittydar.py
|
import sys, os, socket, time, logging
import shlex, subprocess
from hdfs import *
logging.basicConfig()
if len(sys.argv) < 4:
print "usage: napper_kittydar <job name> <worker ID> <executable>"
sys.exit(1)
job_name = sys.argv[1]
worker_id = int(sys.argv[2])
kittydar_path = " ".join(sys.argv[3:])
# fetch inputs from HDFS if necessary
hdfs_fetch_file("/input/kittys/CAT_0%d" % (worker_id), os.environ['FLAGS_task_data_dir'])
# execute program
command = "nodejs %s %s/CAT_0%d" % (kittydar_path, os.environ['FLAGS_task_data_dir'], worker_id)
print "RUNNING: %s" % (command)
subprocess.call(shlex.split(command))
print "Deleting scratch data..."
del_command = "rm -rf %s" % (os.environ['FLAGS_task_data_dir'])
subprocess.call(shlex.split(del_command))
print "All done -- goodbye from Napper!"
sys.exit(0)
|
Add simplified napper instance for kittydar.
|
Add simplified napper instance for kittydar.
|
Python
|
mit
|
ms705/napper
|
Add simplified napper instance for kittydar.
|
import sys, os, socket, time, logging
import shlex, subprocess
from hdfs import *
logging.basicConfig()
if len(sys.argv) < 4:
print "usage: napper_kittydar <job name> <worker ID> <executable>"
sys.exit(1)
job_name = sys.argv[1]
worker_id = int(sys.argv[2])
kittydar_path = " ".join(sys.argv[3:])
# fetch inputs from HDFS if necessary
hdfs_fetch_file("/input/kittys/CAT_0%d" % (worker_id), os.environ['FLAGS_task_data_dir'])
# execute program
command = "nodejs %s %s/CAT_0%d" % (kittydar_path, os.environ['FLAGS_task_data_dir'], worker_id)
print "RUNNING: %s" % (command)
subprocess.call(shlex.split(command))
print "Deleting scratch data..."
del_command = "rm -rf %s" % (os.environ['FLAGS_task_data_dir'])
subprocess.call(shlex.split(del_command))
print "All done -- goodbye from Napper!"
sys.exit(0)
|
<commit_before><commit_msg>Add simplified napper instance for kittydar.<commit_after>
|
import sys, os, socket, time, logging
import shlex, subprocess
from hdfs import *
logging.basicConfig()
if len(sys.argv) < 4:
print "usage: napper_kittydar <job name> <worker ID> <executable>"
sys.exit(1)
job_name = sys.argv[1]
worker_id = int(sys.argv[2])
kittydar_path = " ".join(sys.argv[3:])
# fetch inputs from HDFS if necessary
hdfs_fetch_file("/input/kittys/CAT_0%d" % (worker_id), os.environ['FLAGS_task_data_dir'])
# execute program
command = "nodejs %s %s/CAT_0%d" % (kittydar_path, os.environ['FLAGS_task_data_dir'], worker_id)
print "RUNNING: %s" % (command)
subprocess.call(shlex.split(command))
print "Deleting scratch data..."
del_command = "rm -rf %s" % (os.environ['FLAGS_task_data_dir'])
subprocess.call(shlex.split(del_command))
print "All done -- goodbye from Napper!"
sys.exit(0)
|
Add simplified napper instance for kittydar.import sys, os, socket, time, logging
import shlex, subprocess
from hdfs import *
logging.basicConfig()
if len(sys.argv) < 4:
print "usage: napper_kittydar <job name> <worker ID> <executable>"
sys.exit(1)
job_name = sys.argv[1]
worker_id = int(sys.argv[2])
kittydar_path = " ".join(sys.argv[3:])
# fetch inputs from HDFS if necessary
hdfs_fetch_file("/input/kittys/CAT_0%d" % (worker_id), os.environ['FLAGS_task_data_dir'])
# execute program
command = "nodejs %s %s/CAT_0%d" % (kittydar_path, os.environ['FLAGS_task_data_dir'], worker_id)
print "RUNNING: %s" % (command)
subprocess.call(shlex.split(command))
print "Deleting scratch data..."
del_command = "rm -rf %s" % (os.environ['FLAGS_task_data_dir'])
subprocess.call(shlex.split(del_command))
print "All done -- goodbye from Napper!"
sys.exit(0)
|
<commit_before><commit_msg>Add simplified napper instance for kittydar.<commit_after>import sys, os, socket, time, logging
import shlex, subprocess
from hdfs import *
logging.basicConfig()
if len(sys.argv) < 4:
print "usage: napper_kittydar <job name> <worker ID> <executable>"
sys.exit(1)
job_name = sys.argv[1]
worker_id = int(sys.argv[2])
kittydar_path = " ".join(sys.argv[3:])
# fetch inputs from HDFS if necessary
hdfs_fetch_file("/input/kittys/CAT_0%d" % (worker_id), os.environ['FLAGS_task_data_dir'])
# execute program
command = "nodejs %s %s/CAT_0%d" % (kittydar_path, os.environ['FLAGS_task_data_dir'], worker_id)
print "RUNNING: %s" % (command)
subprocess.call(shlex.split(command))
print "Deleting scratch data..."
del_command = "rm -rf %s" % (os.environ['FLAGS_task_data_dir'])
subprocess.call(shlex.split(del_command))
print "All done -- goodbye from Napper!"
sys.exit(0)
|
|
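Invocation follows the script's own usage string; everything after the worker ID is re-joined, so a kittydar path containing spaces survives. A sketch with illustrative values (FLAGS_task_data_dir must be present in the environment, since the script reads it via os.environ):

import os
import subprocess

# Job name, worker ID and kittydar path are positional, per the usage string.
env = dict(os.environ, FLAGS_task_data_dir='/tmp/task0')
subprocess.call(
    ['python', 'napper_kittydar.py', 'kitty-job', '3',
     '/opt/kittydar/kittydar.js'],
    env=env,
)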
d78d944038dfa768a6aac5dc531d5220e6883a11
|
tests/unit/models/reddit/test_modmail.py
|
tests/unit/models/reddit/test_modmail.py
|
from praw.models import ModmailConversation
from ... import UnitTest
class TestModmailConversation(UnitTest):
def test_parse(self):
conversation = ModmailConversation(self.reddit,
_data={'id': 'ik72'})
assert str(conversation) == 'ik72'
|
Test that ModmailConversation.id is preserved after fetch
|
Test that ModmailConversation.id is preserved after fetch
|
Python
|
bsd-2-clause
|
gschizas/praw,13steinj/praw,13steinj/praw,praw-dev/praw,gschizas/praw,darthkedrik/praw,praw-dev/praw,leviroth/praw,leviroth/praw,darthkedrik/praw
|
Test that ModmailConversation.id is preserved after fetch
|
from praw.models import ModmailConversation
from ... import UnitTest
class TestModmailConversation(UnitTest):
def test_parse(self):
conversation = ModmailConversation(self.reddit,
_data={'id': 'ik72'})
assert str(conversation) == 'ik72'
|
<commit_before><commit_msg>Test that ModmailConversation.id is preserved after fetch<commit_after>
|
from praw.models import ModmailConversation
from ... import UnitTest
class TestModmailConversation(UnitTest):
def test_parse(self):
conversation = ModmailConversation(self.reddit,
_data={'id': 'ik72'})
assert str(conversation) == 'ik72'
|
Test that ModmailConversation.id is preserved after fetchfrom praw.models import ModmailConversation
from ... import UnitTest
class TestModmailConversation(UnitTest):
def test_parse(self):
conversation = ModmailConversation(self.reddit,
_data={'id': 'ik72'})
assert str(conversation) == 'ik72'
|
<commit_before><commit_msg>Test that ModmailConversation.id is preserved after fetch<commit_after>from praw.models import ModmailConversation
from ... import UnitTest
class TestModmailConversation(UnitTest):
def test_parse(self):
conversation = ModmailConversation(self.reddit,
_data={'id': 'ik72'})
assert str(conversation) == 'ik72'
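The test above works because PRAW seeds attributes from the _data mapping, letting str() return the id without any network fetch. A sketch of a companion assertion under the same assumption (that seeded keys become plain attributes):

from praw.models import ModmailConversation
from ... import UnitTest

class TestModmailConversationId(UnitTest):
    def test_id_attribute(self):
        conversation = ModmailConversation(self.reddit,
                                           _data={'id': 'ik72'})
        # The seeded id should be readable directly, with no fetch.
        assert conversation.id == 'ik72'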
|
|
c15293b0b68ebee62b148ac8c8e1930d17cd27c7
|
py/heaters.py
|
py/heaters.py
|
class Solution(object):
def findRadius(self, houses, heaters):
"""
:type houses: List[int]
:type heaters: List[int]
:rtype: int
"""
heaters.sort()
lh = len(heaters)
ans = 0
for h in houses:
L, U = -1, lh
while L + 1 < U:
mid = L + (U - L) / 2
if heaters[mid] > h:
U = mid
else:
L = mid
m = None
if L >= 0:
d = abs(heaters[L] - h)
if m is None:
m = d
else:
m = min(m, d)
if U < lh:
d = abs(heaters[U] - h)
if m is None:
m = d
else:
m = min(m, d)
ans = max(m, ans)
return ans
|
Add py solution for 475. Heaters
|
Add py solution for 475. Heaters
475. Heaters: https://leetcode.com/problems/heaters/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 475. Heaters
475. Heaters: https://leetcode.com/problems/heaters/
|
class Solution(object):
def findRadius(self, houses, heaters):
"""
:type houses: List[int]
:type heaters: List[int]
:rtype: int
"""
heaters.sort()
lh = len(heaters)
ans = 0
for h in houses:
L, U = -1, lh
while L + 1 < U:
mid = L + (U - L) / 2
if heaters[mid] > h:
U = mid
else:
L = mid
m = None
if L >= 0:
d = abs(heaters[L] - h)
if m is None:
m = d
else:
m = min(m, d)
if U < lh:
d = abs(heaters[U] - h)
if m is None:
m = d
else:
m = min(m, d)
ans = max(m, ans)
return ans
|
<commit_before><commit_msg>Add py solution for 475. Heaters
475. Heaters: https://leetcode.com/problems/heaters/<commit_after>
|
class Solution(object):
def findRadius(self, houses, heaters):
"""
:type houses: List[int]
:type heaters: List[int]
:rtype: int
"""
heaters.sort()
lh = len(heaters)
ans = 0
for h in houses:
L, U = -1, lh
while L + 1 < U:
mid = L + (U - L) / 2
if heaters[mid] > h:
U = mid
else:
L = mid
m = None
if L >= 0:
d = abs(heaters[L] - h)
if m is None:
m = d
else:
m = min(m, d)
if U < lh:
d = abs(heaters[U] - h)
if m is None:
m = d
else:
m = min(m, d)
ans = max(m, ans)
return ans
|
Add py solution for 475. Heaters
475. Heaters: https://leetcode.com/problems/heaters/class Solution(object):
def findRadius(self, houses, heaters):
"""
:type houses: List[int]
:type heaters: List[int]
:rtype: int
"""
heaters.sort()
lh = len(heaters)
ans = 0
for h in houses:
L, U = -1, lh
while L + 1 < U:
mid = L + (U - L) / 2
if heaters[mid] > h:
U = mid
else:
L = mid
m = None
if L >= 0:
d = abs(heaters[L] - h)
if m is None:
m = d
else:
m = min(m, d)
if U < lh:
d = abs(heaters[U] - h)
if m is None:
m = d
else:
m = min(m, d)
ans = max(m, ans)
return ans
|
<commit_before><commit_msg>Add py solution for 475. Heaters
475. Heaters: https://leetcode.com/problems/heaters/<commit_after>class Solution(object):
def findRadius(self, houses, heaters):
"""
:type houses: List[int]
:type heaters: List[int]
:rtype: int
"""
heaters.sort()
lh = len(heaters)
ans = 0
for h in houses:
L, U = -1, lh
while L + 1 < U:
mid = L + (U - L) / 2
if heaters[mid] > h:
U = mid
else:
L = mid
m = None
if L >= 0:
d = abs(heaters[L] - h)
if m is None:
m = d
else:
m = min(m, d)
if U < lh:
d = abs(heaters[U] - h)
if m is None:
m = d
else:
m = min(m, d)
ans = max(m, ans)
return ans
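As a quick sanity check, the solution above can be exercised against the published LeetCode examples (run under Python 2, matching the integer `/` division in the code):

s = Solution()
# One heater at position 2 covers houses 1..3 with radius 1.
assert s.findRadius([1, 2, 3], [2]) == 1
# Heaters at 1 and 4 cover houses 1..4 with radius 1.
assert s.findRadius([1, 2, 3, 4], [1, 4]) == 1
print "all heater checks passed"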
|
|
38d17a3d7cf98d69b2366b50bc3994de2f505cca
|
core/migrations/0029_machinerequest_new_version_scripts.py
|
core/migrations/0029_machinerequest_new_version_scripts.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0028_add_version_license_and_scripts'),
]
operations = [
migrations.AddField(
model_name='machinerequest',
name='new_version_scripts',
field=models.ManyToManyField(to='core.BootScript', blank=True),
),
]
|
Add boot script and licensing support to API v1
|
Add boot script and licensing support to API v1
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
Add boot script and licensing support to API v1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0028_add_version_license_and_scripts'),
]
operations = [
migrations.AddField(
model_name='machinerequest',
name='new_version_scripts',
field=models.ManyToManyField(to='core.BootScript', blank=True),
),
]
|
<commit_before><commit_msg>Add boot script and licensing support to API v1<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0028_add_version_license_and_scripts'),
]
operations = [
migrations.AddField(
model_name='machinerequest',
name='new_version_scripts',
field=models.ManyToManyField(to='core.BootScript', blank=True),
),
]
|
Add boot script and licensing support to API v1# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0028_add_version_license_and_scripts'),
]
operations = [
migrations.AddField(
model_name='machinerequest',
name='new_version_scripts',
field=models.ManyToManyField(to='core.BootScript', blank=True),
),
]
|
<commit_before><commit_msg>Add boot script and licensing support to API v1<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0028_add_version_license_and_scripts'),
]
operations = [
migrations.AddField(
model_name='machinerequest',
name='new_version_scripts',
field=models.ManyToManyField(to='core.BootScript', blank=True),
),
]
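Once this migration is applied, the new field behaves like any other Django many-to-many accessor. A hedged usage sketch — the import path is inferred from the app label, and the objects fetched here are placeholders:

from core.models import MachineRequest, BootScript

request = MachineRequest.objects.first()
script = BootScript.objects.first()
if request and script:
    # Attach a boot script to the requested new version.
    request.new_version_scripts.add(script)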
|
|
34c179a2db3fa1b2419869b2f77f37de272d21d5
|
voteswap/tests/test_sendgrid_backend.py
|
voteswap/tests/test_sendgrid_backend.py
|
from django.core.mail import EmailMultiAlternatives
from django.test import override_settings
from django.test import TestCase
from mock import patch
from mock import MagicMock
@override_settings(
EMAIL_BACKEND='voteswap.mail.backends.sendgrid.SendGridBackend')
class TestEmailBase(TestCase):
def setUp(self):
self.patched_client_instance = MagicMock()
self.patcher = patch(
'voteswap.mail.backends.sendgrid.SendGridAPIClient',
return_value=self.patched_client_instance)
self.patched_client = self.patcher.start()
def tearDown(self):
self.patcher.stop()
def _get_email(self):
message = EmailMultiAlternatives(
from_email=u'noreply@email.voteswap.us',
to=['to@example.com'],
reply_to=['replyto@example.com'],
subject=u"Test email",
body='Test body')
message.attach_alternative('<html>html body</html>', 'text/html')
return message
def test_called(self):
from voteswap.mail.backends.sendgrid import SendGridBackend
with patch.object(SendGridBackend, 'send_messages') as mock_sendgrid:
message = self._get_email()
message.send()
mock_sendgrid.assert_called_with([message])
def test_apikey(self):
with override_settings(SENDGRID_API_KEY='foobar'):
message = self._get_email()
message.send()
self.patched_client.assert_called_with(apikey='foobar')
def test_send(self):
message = self._get_email()
message.send()
send_mail_request = (
self.patched_client_instance.client.mail.send.post.call_args[1])
content = send_mail_request['request_body']
self.assertEqual(content['from']['email'], message.from_email)
self.assertEqual(content['personalizations'][0]['to'][0]['email'],
message.recipients()[0])
self.assertEqual(len(content['content']), 2)
for body in content['content']:
if body['type'] == 'text/plain':
self.assertEqual(body['value'], message.body)
else:
self.assertEqual(body['value'], message.alternatives[0][0])
|
Test the sendgrid email backend
|
Test the sendgrid email backend
|
Python
|
mit
|
sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap
|
Test the sendgrid email backend
|
from django.core.mail import EmailMultiAlternatives
from django.test import override_settings
from django.test import TestCase
from mock import patch
from mock import MagicMock
@override_settings(
EMAIL_BACKEND='voteswap.mail.backends.sendgrid.SendGridBackend')
class TestEmailBase(TestCase):
def setUp(self):
self.patched_client_instance = MagicMock()
self.patcher = patch(
'voteswap.mail.backends.sendgrid.SendGridAPIClient',
return_value=self.patched_client_instance)
self.patched_client = self.patcher.start()
def tearDown(self):
self.patcher.stop()
def _get_email(self):
message = EmailMultiAlternatives(
from_email=u'noreply@email.voteswap.us',
to=['to@example.com'],
reply_to=['replyto@example.com'],
subject=u"Test email",
body='Test body')
message.attach_alternative('<html>html body</html>', 'text/html')
return message
def test_called(self):
from voteswap.mail.backends.sendgrid import SendGridBackend
with patch.object(SendGridBackend, 'send_messages') as mock_sendgrid:
message = self._get_email()
message.send()
mock_sendgrid.assert_called_with([message])
def test_apikey(self):
with override_settings(SENDGRID_API_KEY='foobar'):
message = self._get_email()
message.send()
self.patched_client.assert_called_with(apikey='foobar')
def test_send(self):
message = self._get_email()
message.send()
send_mail_request = (
self.patched_client_instance.client.mail.send.post.call_args[1])
content = send_mail_request['request_body']
self.assertEqual(content['from']['email'], message.from_email)
self.assertEqual(content['personalizations'][0]['to'][0]['email'],
message.recipients()[0])
self.assertEqual(len(content['content']), 2)
for body in content['content']:
if body['type'] == 'text/plain':
self.assertEqual(body['value'], message.body)
else:
self.assertEqual(body['value'], message.alternatives[0][0])
|
<commit_before><commit_msg>Test the sendgrid email backend<commit_after>
|
from django.core.mail import EmailMultiAlternatives
from django.test import override_settings
from django.test import TestCase
from mock import patch
from mock import MagicMock
@override_settings(
EMAIL_BACKEND='voteswap.mail.backends.sendgrid.SendGridBackend')
class TestEmailBase(TestCase):
def setUp(self):
self.patched_client_instance = MagicMock()
self.patcher = patch(
'voteswap.mail.backends.sendgrid.SendGridAPIClient',
return_value=self.patched_client_instance)
self.patched_client = self.patcher.start()
def tearDown(self):
self.patcher.stop()
def _get_email(self):
message = EmailMultiAlternatives(
from_email=u'noreply@email.voteswap.us',
to=['to@example.com'],
reply_to=['replyto@example.com'],
subject=u"Test email",
body='Test body')
message.attach_alternative('<html>html body</html>', 'text/html')
return message
def test_called(self):
from voteswap.mail.backends.sendgrid import SendGridBackend
with patch.object(SendGridBackend, 'send_messages') as mock_sendgrid:
message = self._get_email()
message.send()
mock_sendgrid.assert_called_with([message])
def test_apikey(self):
with override_settings(SENDGRID_API_KEY='foobar'):
message = self._get_email()
message.send()
self.patched_client.assert_called_with(apikey='foobar')
def test_send(self):
message = self._get_email()
message.send()
send_mail_request = (
self.patched_client_instance.client.mail.send.post.call_args[1])
content = send_mail_request['request_body']
self.assertEqual(content['from']['email'], message.from_email)
self.assertEqual(content['personalizations'][0]['to'][0]['email'],
message.recipients()[0])
self.assertEqual(len(content['content']), 2)
for body in content['content']:
if body['type'] == 'text/plain':
self.assertEqual(body['value'], message.body)
else:
self.assertEqual(body['value'], message.alternatives[0][0])
|
Test the sendgrid email backendfrom django.core.mail import EmailMultiAlternatives
from django.test import override_settings
from django.test import TestCase
from mock import patch
from mock import MagicMock
@override_settings(
EMAIL_BACKEND='voteswap.mail.backends.sendgrid.SendGridBackend')
class TestEmailBase(TestCase):
def setUp(self):
self.patched_client_instance = MagicMock()
self.patcher = patch(
'voteswap.mail.backends.sendgrid.SendGridAPIClient',
return_value=self.patched_client_instance)
self.patched_client = self.patcher.start()
def tearDown(self):
self.patcher.stop()
def _get_email(self):
message = EmailMultiAlternatives(
from_email=u'noreply@email.voteswap.us',
to=['to@example.com'],
reply_to=['replyto@example.com'],
subject=u"Test email",
body='Test body')
message.attach_alternative('<html>html body</html>', 'text/html')
return message
def test_called(self):
from voteswap.mail.backends.sendgrid import SendGridBackend
with patch.object(SendGridBackend, 'send_messages') as mock_sendgrid:
message = self._get_email()
message.send()
mock_sendgrid.assert_called_with([message])
def test_apikey(self):
with override_settings(SENDGRID_API_KEY='foobar'):
message = self._get_email()
message.send()
self.patched_client.assert_called_with(apikey='foobar')
def test_send(self):
message = self._get_email()
message.send()
send_mail_request = (
self.patched_client_instance.client.mail.send.post.call_args[1])
content = send_mail_request['request_body']
self.assertEqual(content['from']['email'], message.from_email)
self.assertEqual(content['personalizations'][0]['to'][0]['email'],
message.recipients()[0])
self.assertEqual(len(content['content']), 2)
for body in content['content']:
if body['type'] == 'text/plain':
self.assertEqual(body['value'], message.body)
else:
self.assertEqual(body['value'], message.alternatives[0][0])
|
<commit_before><commit_msg>Test the sendgrid email backend<commit_after>from django.core.mail import EmailMultiAlternatives
from django.test import override_settings
from django.test import TestCase
from mock import patch
from mock import MagicMock
@override_settings(
EMAIL_BACKEND='voteswap.mail.backends.sendgrid.SendGridBackend')
class TestEmailBase(TestCase):
def setUp(self):
self.patched_client_instance = MagicMock()
self.patcher = patch(
'voteswap.mail.backends.sendgrid.SendGridAPIClient',
return_value=self.patched_client_instance)
self.patched_client = self.patcher.start()
def tearDown(self):
self.patcher.stop()
def _get_email(self):
message = EmailMultiAlternatives(
from_email=u'noreply@email.voteswap.us',
to=['to@example.com'],
reply_to=['replyto@example.com'],
subject=u"Test email",
body='Test body')
message.attach_alternative('<html>html body</html>', 'text/html')
return message
def test_called(self):
from voteswap.mail.backends.sendgrid import SendGridBackend
with patch.object(SendGridBackend, 'send_messages') as mock_sendgrid:
message = self._get_email()
message.send()
mock_sendgrid.assert_called_with([message])
def test_apikey(self):
with override_settings(SENDGRID_API_KEY='foobar'):
message = self._get_email()
message.send()
self.patched_client.assert_called_with(apikey='foobar')
def test_send(self):
message = self._get_email()
message.send()
send_mail_request = (
self.patched_client_instance.client.mail.send.post.call_args[1])
content = send_mail_request['request_body']
self.assertEqual(content['from']['email'], message.from_email)
self.assertEqual(content['personalizations'][0]['to'][0]['email'],
message.recipients()[0])
self.assertEqual(len(content['content']), 2)
for body in content['content']:
if body['type'] == 'text/plain':
self.assertEqual(body['value'], message.body)
else:
self.assertEqual(body['value'], message.alternatives[0][0])
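Because the backend is selected entirely through settings, application code that sends mail stays unchanged. A minimal sending sketch (addresses are placeholders; EMAIL_BACKEND and SENDGRID_API_KEY are assumed to be configured as in the tests above):

from django.core.mail import EmailMultiAlternatives

message = EmailMultiAlternatives(
    from_email='noreply@example.com',
    to=['someone@example.com'],
    subject='Hello',
    body='Plain-text body')
message.attach_alternative('<html>HTML body</html>', 'text/html')
message.send()  # routed through the SendGrid backend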
|
|
8803f6058255237dff39549426ca6a513a25193c
|
website_product_supplier/__openerp__.py
|
website_product_supplier/__openerp__.py
|
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
Add images key in manifest file
|
[FIX] website_product_supplier: Add images key in manifest file
|
Python
|
agpl-3.0
|
nuobit/website,open-synergy/website,gfcapalbo/website,LasLabs/website,acsone/website,nuobit/website,LasLabs/website,Yajo/website,LasLabs/website,gfcapalbo/website,kaerdsar/website,Yajo/website,nuobit/website,nuobit/website,Yajo/website,gfcapalbo/website,acsone/website,kaerdsar/website,LasLabs/website,open-synergy/website,acsone/website,acsone/website,gfcapalbo/website,open-synergy/website,brain-tec/website,brain-tec/website,open-synergy/website,brain-tec/website,kaerdsar/website,brain-tec/website,Yajo/website
|
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
[FIX] website_product_supplier: Add images key in manifest file
|
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
<commit_msg>[FIX] website_product_supplier: Add images key in manifest file<commit_after>
|
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
[FIX] website_product_supplier: Add images key in manifest file# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
<commit_msg>[FIX] website_product_supplier: Add images key in manifest file<commit_after># -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
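The empty 'images' list is what the OCA manifest checks require; when screenshots exist they are conventionally referenced relative to the module root. A hypothetical populated excerpt:

# Hypothetical manifest excerpt with a populated images list.
manifest_excerpt = {
    'images': ['static/description/main_screenshot.png'],
}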
|
d758cab051d9f6b36daa39bb061cc17bbb3e53b9
|
locations/spiders/jefferson_univ_hosp.py
|
locations/spiders/jefferson_univ_hosp.py
|
# -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
class jefferson_univ_hosp(scrapy.Spider):
name = "jefferson_univ_hosp"
allowed_domains = ['hospitals.jefferson.edu']
start_urls = [
'https://hospitals.jefferson.edu/find-a-location.html',
]
def parse(self, response):
data = " ".join(response.xpath('//script[contains(text(), "itemsArray.push")]/text()').extract())
locations = re.findall(r"itemsArray.push\((.+?)\);",data)
for loc in locations:
if len(loc.split(";")) == 6:
loctype, locname, html, url, lat, lon = loc.strip("'").split(";")
phone = re.search(r"Phone:.*?([\d\-]+?)</p>", html)
if phone:
phone = phone.groups()[0]
postcode = re.search(r"<br>.+?,.+?(\d{5})</p>", html)
if postcode:
postcode = postcode.groups()[0]
addr_full = re.search(r"</h3><p>(.+?)<br>", html)
if addr_full:
addr_full = addr_full.groups()[0]
properties = {
'name': locname,
'ref': loctype + "_" + locname,
'addr_full': addr_full if addr_full else re.search(r"</h3> <p>(.+?)<br>", html).groups()[0],
'city': re.search(r"<br>(.+?),", html).groups()[0],
'state': re.search(r",(\s\D{2})", html).groups()[0].strip(),
'postcode': postcode if postcode else None,
'phone': phone if phone else None,
'website': url,
'lat': float(lat),
'lon': float(lon),
}
else:
loctype, html = loc.strip("'").split(";")
locname, addr_full = html.split("(")
properties = {
'name': locname,
'ref': loc,
'addr_full': addr_full
}
yield GeojsonPointItem(**properties)
|
Add spider for Thomas Jefferson University and Hospitals
|
Add spider for Thomas Jefferson University and Hospitals
|
Python
|
mit
|
iandees/all-the-places,iandees/all-the-places,iandees/all-the-places
|
Add spider for Thomas Jefferson University and Hospitals
|
# -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
class jefferson_univ_hosp(scrapy.Spider):
name = "jefferson_univ_hosp"
allowed_domains = ['hospitals.jefferson.edu']
start_urls = [
'https://hospitals.jefferson.edu/find-a-location.html',
]
def parse(self, response):
data = " ".join(response.xpath('//script[contains(text(), "itemsArray.push")]/text()').extract())
locations = re.findall(r"itemsArray.push\((.+?)\);",data)
for loc in locations:
if len(loc.split(";")) == 6:
loctype, locname, html, url, lat, lon = loc.strip("'").split(";")
phone = re.search(r"Phone:.*?([\d\-]+?)</p>", html)
if phone:
phone = phone.groups()[0]
postcode = re.search(r"<br>.+?,.+?(\d{5})</p>", html)
if postcode:
postcode = postcode.groups()[0]
addr_full = re.search(r"</h3><p>(.+?)<br>", html)
if addr_full:
addr_full = addr_full.groups()[0]
properties = {
'name': locname,
'ref': loctype + "_" + locname,
'addr_full': addr_full if addr_full else re.search(r"</h3> <p>(.+?)<br>", html).groups()[0],
'city': re.search(r"<br>(.+?),", html).groups()[0],
'state': re.search(r",(\s\D{2})", html).groups()[0].strip(),
'postcode': postcode if postcode else None,
'phone': phone if phone else None,
'website': url,
'lat': float(lat),
'lon': float(lon),
}
else:
loctype, html = loc.strip("'").split(";")
locname, addr_full = html.split("(")
properties = {
'name': locname,
'ref': loc,
'addr_full': addr_full
}
yield GeojsonPointItem(**properties)
|
<commit_before><commit_msg>Add spider for Thomas Jefferson University and Hospitals<commit_after>
|
# -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
class jefferson_univ_hosp(scrapy.Spider):
name = "jefferson_univ_hosp"
allowed_domains = ['hospitals.jefferson.edu']
start_urls = [
'https://hospitals.jefferson.edu/find-a-location.html',
]
def parse(self, response):
data = " ".join(response.xpath('//script[contains(text(), "itemsArray.push")]/text()').extract())
locations = re.findall(r"itemsArray.push\((.+?)\);",data)
for loc in locations:
if len(loc.split(";")) == 6:
loctype, locname, html, url, lat, lon = loc.strip("'").split(";")
phone = re.search(r"Phone:.*?([\d\-]+?)</p>", html)
if phone:
phone = phone.groups()[0]
postcode = re.search(r"<br>.+?,.+?(\d{5})</p>", html)
if postcode:
postcode = postcode.groups()[0]
addr_full = re.search(r"</h3><p>(.+?)<br>", html)
if addr_full:
addr_full = addr_full.groups()[0]
properties = {
'name': locname,
'ref': loctype + "_" + locname,
'addr_full': addr_full if addr_full else re.search(r"</h3> <p>(.+?)<br>", html).groups()[0],
'city': re.search(r"<br>(.+?),", html).groups()[0],
'state': re.search(r",(\s\D{2})", html).groups()[0].strip(),
'postcode': postcode if postcode else None,
'phone': phone if phone else None,
'website': url,
'lat': float(lat),
'lon': float(lon),
}
else:
loctype, html = loc.strip("'").split(";")
locname, addr_full = html.split("(")
properties = {
'name': locname,
'ref': loc,
'addr_full': addr_full
}
yield GeojsonPointItem(**properties)
|
Add spider for Thomas Jefferson University and Hospitals# -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
class jefferson_univ_hosp(scrapy.Spider):
name = "jefferson_univ_hosp"
allowed_domains = ['hospitals.jefferson.edu']
start_urls = [
'https://hospitals.jefferson.edu/find-a-location.html',
]
def parse(self, response):
data = " ".join(response.xpath('//script[contains(text(), "itemsArray.push")]/text()').extract())
locations = re.findall(r"itemsArray.push\((.+?)\);",data)
for loc in locations:
if len(loc.split(";")) == 6:
loctype, locname, html, url, lat, lon = loc.strip("'").split(";")
phone = re.search(r"Phone:.*?([\d\-]+?)</p>", html)
if phone:
phone = phone.groups()[0]
postcode = re.search(r"<br>.+?,.+?(\d{5})</p>", html)
if postcode:
postcode = postcode.groups()[0]
addr_full = re.search(r"</h3><p>(.+?)<br>", html)
if addr_full:
addr_full = addr_full.groups()[0]
properties = {
'name': locname,
'ref': loctype + "_" + locname,
'addr_full': addr_full if addr_full else re.search(r"</h3> <p>(.+?)<br>", html).groups()[0],
'city': re.search(r"<br>(.+?),", html).groups()[0],
'state': re.search(r",(\s\D{2})", html).groups()[0].strip(),
'postcode': postcode if postcode else None,
'phone': phone if phone else None,
'website': url,
'lat': float(lat),
'lon': float(lon),
}
else:
loctype, html = loc.strip("'").split(";")
locname, addr_full = html.split("(")
properties = {
'name': locname,
'ref': loc,
'addr_full': addr_full
}
yield GeojsonPointItem(**properties)
|
<commit_before><commit_msg>Add spider for Thomas Jefferson University and Hospitals<commit_after># -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
class jefferson_univ_hosp(scrapy.Spider):
name = "jefferson_univ_hosp"
allowed_domains = ['hospitals.jefferson.edu']
start_urls = [
'https://hospitals.jefferson.edu/find-a-location.html',
]
def parse(self, response):
data = " ".join(response.xpath('//script[contains(text(), "itemsArray.push")]/text()').extract())
locations = re.findall(r"itemsArray.push\((.+?)\);",data)
for loc in locations:
if len(loc.split(";")) == 6:
loctype, locname, html, url, lat, lon = loc.strip("'").split(";")
phone = re.search(r"Phone:.*?([\d\-]+?)</p>", html)
if phone:
phone = phone.groups()[0]
postcode = re.search(r"<br>.+?,.+?(\d{5})</p>", html)
if postcode:
postcode = postcode.groups()[0]
addr_full = re.search(r"</h3><p>(.+?)<br>", html)
if addr_full:
addr_full = addr_full.groups()[0]
properties = {
'name': locname,
'ref': loctype + "_" + locname,
'addr_full': addr_full if addr_full else re.search(r"</h3> <p>(.+?)<br>", html).groups()[0],
'city': re.search(r"<br>(.+?),", html).groups()[0],
'state': re.search(r",(\s\D{2})", html).groups()[0].strip(),
'postcode': postcode if postcode else None,
'phone': phone if phone else None,
'website': url,
'lat': float(lat),
'lon': float(lon),
}
else:
loctype, html = loc.strip("'").split(";")
locname, addr_full = html.split("(")
properties = {
'name': locname,
'ref': loc,
'addr_full': addr_full
}
yield GeojsonPointItem(**properties)
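A sketch of running the spider above programmatically; note the FEEDS setting is from newer Scrapy releases (older versions use FEED_FORMAT/FEED_URI), so treat the exact settings keys as an assumption:

from scrapy.crawler import CrawlerProcess

process = CrawlerProcess(settings={
    'FEEDS': {'jefferson_locations.json': {'format': 'json'}},
})
process.crawl(jefferson_univ_hosp)
process.start()  # blocks until the crawl finishes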
|
|
d98f57c7be175df8103c7f29fac4e39057ff4891
|
website/upload_local_replays.py
|
website/upload_local_replays.py
|
""" Pull replay from rocketleaguereplays and post turbo-carnival """
import StringIO
import os
import sys
import traceback
import requests
import boto3
# To setup your keys
# aws configure
DYNAMO = boto3.resource("dynamodb")
TABLE = DYNAMO.Table('turbo-carnival')
REPLAY_DIR = "C:\Users\Carlos\Documents\My Games\Rocket League\TAGame\Demos"
def getReplay(replay_dir, replay_filename):
""" Grabs an individual replay and posts to turbo-carnival"""
files = {'file': open(replay_dir + "\\" + replay_filename, 'rb')}
post = requests.post("http://rocketleague.carlosrendon.me/upload?bulk=true", files=files)
if str(post.status_code) != '200':
print("upload failed: " + str(post.status_code))
def getAllReplays(replay_dir):
""" Get's all of the replays """
for replay_file in os.listdir(replay_dir):
print(replay_file)
replay = replay_file.replace(".replay","")
try:
ret = TABLE.get_item(Key={"replay_key": replay})
except:
traceback.print_exc(file=sys.stderr)
exit(1)
if not ret.has_key('Item'):
print("Grabbing " + replay)
getReplay(replay_dir, replay_file)
else:
print("Skipping " + replay)
getAllReplays(REPLAY_DIR)
|
Add script to upload local replays
|
Add script to upload local replays
|
Python
|
mit
|
mrcarlosrendon/turbo-carnival,mrcarlosrendon/turbo-carnival,mrcarlosrendon/turbo-carnival
|
Add script to upload local replays
|
""" Pull replay from rocketleaguereplays and post turbo-carnival """
import StringIO
import os
import sys
import traceback
import requests
import boto3
# To setup your keys
# aws configure
DYNAMO = boto3.resource("dynamodb")
TABLE = DYNAMO.Table('turbo-carnival')
REPLAY_DIR = "C:\Users\Carlos\Documents\My Games\Rocket League\TAGame\Demos"
def getReplay(replay_dir, replay_filename):
""" Grabs an individual replay and posts to turbo-carnival"""
files = {'file': open(replay_dir + "\\" + replay_filename, 'rb')}
post = requests.post("http://rocketleague.carlosrendon.me/upload?bulk=true", files=files)
if str(post.status_code) != '200':
print("upload failed: " + str(post.status_code))
def getAllReplays(replay_dir):
""" Get's all of the replays """
for replay_file in os.listdir(replay_dir):
print(replay_file)
replay = replay_file.replace(".replay","")
try:
ret = TABLE.get_item(Key={"replay_key": replay})
except:
traceback.print_exc(file=sys.stderr)
exit(1)
if not ret.has_key('Item'):
print("Grabbing " + replay)
getReplay(replay_dir, replay_file)
else:
print("Skipping " + replay)
getAllReplays(REPLAY_DIR)
|
<commit_before><commit_msg>Add script to upload local replays<commit_after>
|
""" Pull replay from rocketleaguereplays and post turbo-carnival """
import StringIO
import os
import sys
import traceback
import requests
import boto3
# To setup your keys
# aws configure
DYNAMO = boto3.resource("dynamodb")
TABLE = DYNAMO.Table('turbo-carnival')
REPLAY_DIR = "C:\Users\Carlos\Documents\My Games\Rocket League\TAGame\Demos"
def getReplay(replay_dir, replay_filename):
""" Grabs an individual replay and posts to turbo-carnival"""
files = {'file': open(replay_dir + "\\" + replay_filename, 'rb')}
post = requests.post("http://rocketleague.carlosrendon.me/upload?bulk=true", files=files)
if str(post.status_code) != '200':
print("upload failed: " + str(post.status_code))
def getAllReplays(replay_dir):
""" Get's all of the replays """
for replay_file in os.listdir(replay_dir):
print(replay_file)
replay = replay_file.replace(".replay","")
try:
ret = TABLE.get_item(Key={"replay_key": replay})
except:
traceback.print_exc(file=sys.stderr)
exit(1)
if not ret.has_key('Item'):
print("Grabbing " + replay)
getReplay(replay_dir, replay_file)
else:
print("Skipping " + replay)
getAllReplays(REPLAY_DIR)
|
Add script to upload local replays""" Pull replay from rocketleaguereplays and post turbo-carnival """
import StringIO
import os
import sys
import traceback
import requests
import boto3
# To setup your keys
# aws configure
DYNAMO = boto3.resource("dynamodb")
TABLE = DYNAMO.Table('turbo-carnival')
REPLAY_DIR = "C:\Users\Carlos\Documents\My Games\Rocket League\TAGame\Demos"
def getReplay(replay_dir, replay_filename):
""" Grabs an individual replay and posts to turbo-carnival"""
files = {'file': open(replay_dir + "\\" + replay_filename, 'rb')}
post = requests.post("http://rocketleague.carlosrendon.me/upload?bulk=true", files=files)
if str(post.status_code) != '200':
print("upload failed: " + str(post.status_code))
def getAllReplays(replay_dir):
""" Get's all of the replays """
for replay_file in os.listdir(replay_dir):
print(replay_file)
replay = replay_file.replace(".replay","")
try:
ret = TABLE.get_item(Key={"replay_key": replay})
except:
traceback.print_exc(file=sys.stderr)
exit(1)
if not ret.has_key('Item'):
print("Grabbing " + replay)
getReplay(replay_dir, replay_file)
else:
print("Skipping " + replay)
getAllReplays(REPLAY_DIR)
|
<commit_before><commit_msg>Add script to upload local replays<commit_after>""" Upload local Rocket League replays to turbo-carnival """
import StringIO
import os
import sys
import traceback
import requests
import boto3
# To setup your keys
# aws configure
DYNAMO = boto3.resource("dynamodb")
TABLE = DYNAMO.Table('turbo-carnival')
REPLAY_DIR = "C:\Users\Carlos\Documents\My Games\Rocket League\TAGame\Demos"
def getReplay(replay_dir, replay_filename):
""" Grabs an individual replay and posts to turbo-carnival"""
files = {'file': open(replay_dir + "\\" + replay_filename, 'rb')}
post = requests.post("http://rocketleague.carlosrendon.me/upload?bulk=true", files=files)
if str(post.status_code) != '200':
print("upload failed: " + str(post.status_code))
def getAllReplays(replay_dir):
""" Get's all of the replays """
for replay_file in os.listdir(replay_dir):
print(replay_file)
replay = replay_file.replace(".replay","")
try:
ret = TABLE.get_item(Key={"replay_key": replay})
except:
traceback.print_exc(file=sys.stderr)
exit(1)
if not ret.has_key('Item'):
print("Grabbing " + replay)
getReplay(replay_dir, replay_file)
else:
print("Skipping " + replay)
getAllReplays(REPLAY_DIR)
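The has_key call, bare except, and StringIO import pin the script above to Python 2. A sketch of the same DynamoDB lookup written so it also runs on Python 3 — the table name and key schema are taken from the script, while the replay key value is hypothetical:

import boto3

table = boto3.resource('dynamodb').Table('turbo-carnival')
ret = table.get_item(Key={'replay_key': 'example-replay'})
if 'Item' not in ret:
    print('replay not uploaded yet')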
|
|
bf0a8d5a13ecf7acbb646696a602a24898793294
|
newsroom/management/commands/setsite.py
|
newsroom/management/commands/setsite.py
|
from django.core.management.base import BaseCommand, CommandError
from newsroom.models import Article
from django.contrib.sites.models import Site
def process(site_name, site_domain):
site = Site.objects.get_current()
site.name = site_name
site.domain = site_domain
site.save()
class Command(BaseCommand):
help = 'Set name and domain of site'
def add_arguments(self, parser):
parser.add_argument('site_name')
parser.add_argument('site_domain')
def handle(self, *args, **options):
process(options["site_name"], options["site_domain"])
|
Change site name and domain from command line.
|
Change site name and domain from command line.
|
Python
|
bsd-3-clause
|
groundupnews/gu,groundupnews/gu,groundupnews/gu,groundupnews/gu,groundupnews/gu
|
Change site name and domain from command line.
|
from django.core.management.base import BaseCommand, CommandError
from newsroom.models import Article
from django.contrib.sites.models import Site
def process(site_name, site_domain):
site = Site.objects.get_current()
site.name = site_name
site.domain = site_domain
site.save()
class Command(BaseCommand):
help = 'Set name and domain of site'
def add_arguments(self, parser):
parser.add_argument('site_name')
parser.add_argument('site_domain')
def handle(self, *args, **options):
process(options["site_name"], options["site_domain"])
|
<commit_before><commit_msg>Change site name and domain from command line.<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from newsroom.models import Article
from django.contrib.sites.models import Site
def process(site_name, site_domain):
site = Site.objects.get_current()
site.name = site_name
site.domain = site_domain
site.save()
class Command(BaseCommand):
help = 'Set name and domain of site'
def add_arguments(self, parser):
parser.add_argument('site_name')
parser.add_argument('site_domain')
def handle(self, *args, **options):
process(options["site_name"], options["site_domain"])
|
Change site name and domain from command line.from django.core.management.base import BaseCommand, CommandError
from newsroom.models import Article
from django.contrib.sites.models import Site
def process(site_name, site_domain):
site = Site.objects.get_current()
site.name = site_name
site.domain = site_domain
site.save()
class Command(BaseCommand):
help = 'Set name and domain of site'
def add_arguments(self, parser):
parser.add_argument('site_name')
parser.add_argument('site_domain')
def handle(self, *args, **options):
process(options["site_name"], options["site_domain"])
|
<commit_before><commit_msg>Change site name and domain from command line.<commit_after>from django.core.management.base import BaseCommand, CommandError
from newsroom.models import Article
from django.contrib.sites.models import Site
def process(site_name, site_domain):
site = Site.objects.get_current()
site.name = site_name
site.domain = site_domain
site.save()
class Command(BaseCommand):
help = 'Set name and domain of site'
def add_arguments(self, parser):
parser.add_argument('site_name')
parser.add_argument('site_domain')
def handle(self, *args, **options):
process(options["site_name"], options["site_domain"])
|
|
b98f0570842a5c51a286b09df7e7d49ae708765e
|
tests/reader-revA-test.py
|
tests/reader-revA-test.py
|
"""Tests for the revA circuit board of the 'reader'.
These tests will test the expected functionality of 'reader-revA'
(https://github.com/fmfi-svt-deadlock/reader-hw/releases/tag/revA).
They are intended to be run once on new boards before the FW is loaded
and the device is deployed
"""
import mempoke
from test_sequencer import run, ask
from devices.stm32f0 import STM32F0
dev = STM32F0(mempoke.DeviceMemory())
def turn_on_led(port, pin):
"""Helper function for testing LEDs"""
dev.RCC.AHBENR |= (1 << dev.RCC.AHBENR_bits["IOP" + port + "EN"])
dev.GPIO[port].MODER |= (dev.GPIO[port].MODE_bits["OUTPUT"] << pin*2)
dev.GPIO[port].ODR |= (1 << pin)
def reset_peripherals():
"""Resets used peripherals, teardown function"""
for i in ['A', 'B', 'C', 'D', 'E', 'F']:
dev.RCC.AHBRSTR |= (1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
dev.RCC.AHBRSTR &= ~(1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
def led1():
"""Tests green LED1"""
turn_on_led('B', 1)
error = None
if not ask('Is LED1 green?'):
error = 'Green LED1 problem'
reset_peripherals()
return error
def led2():
"""Tests green LED2"""
turn_on_led('A', 8)
error = None
if not ask('Is LED2 green?'):
error = 'Green LED2 problem'
reset_peripherals()
return error
tests = [led1, led2]
run(tests)
|
Add tests for LED1 and LED2
|
[reader-revA] Add tests for LED1 and LED2
|
Python
|
mit
|
fmfi-svt-deadlock/hw-testing,fmfi-svt-deadlock/hw-testing
|
[reader-revA] Add tests for LED1 and LED2
|
"""Tests for the revA circuit board of the 'reader'.
These tests will test the expected functionality of 'reader-revA'
(https://github.com/fmfi-svt-deadlock/reader-hw/releases/tag/revA).
They are intended to be run once on new boards before the FW is loaded
and the device is deployed
"""
import mempoke
from test_sequencer import run, ask
from devices.stm32f0 import STM32F0
dev = STM32F0(mempoke.DeviceMemory())
def turn_on_led(port, pin):
"""Helper function for testing LEDs"""
dev.RCC.AHBENR |= (1 << dev.RCC.AHBENR_bits["IOP" + port + "EN"])
dev.GPIO[port].MODER |= (dev.GPIO[port].MODE_bits["OUTPUT"] << pin*2)
dev.GPIO[port].ODR |= (1 << pin)
def reset_peripherals():
"""Resets used peripherals, teardown function"""
for i in ['A', 'B', 'C', 'D', 'E', 'F']:
dev.RCC.AHBRSTR |= (1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
dev.RCC.AHBRSTR &= ~(1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
def led1():
"""Tests green LED1"""
turn_on_led('B', 1)
error = None
if not ask('Is LED1 green?'):
error = 'Green LED1 problem'
reset_peripherals()
return error
def led2():
"""Tests green LED2"""
turn_on_led('A', 8)
error = None
if not ask('Is LED2 green?'):
error = 'Green LED2 problem'
reset_peripherals()
return error
tests = [led1, led2]
run(tests)
|
<commit_before><commit_msg>[reader-revA] Add tests for LED1 and LED2<commit_after>
|
"""Tests for the revA circuit board of the 'reader'.
These tests will test the expected functionality of 'reader-revA'
(https://github.com/fmfi-svt-deadlock/reader-hw/releases/tag/revA).
They are intended to be run once on new boards before the FW is loaded
and the device is deployed
"""
import mempoke
from test_sequencer import run, ask
from devices.stm32f0 import STM32F0
dev = STM32F0(mempoke.DeviceMemory())
def turn_on_led(port, pin):
"""Helper function for testing LEDs"""
dev.RCC.AHBENR |= (1 << dev.RCC.AHBENR_bits["IOP" + port + "EN"])
dev.GPIO[port].MODER |= (dev.GPIO[port].MODE_bits["OUTPUT"] << pin*2)
dev.GPIO[port].ODR |= (1 << pin)
def reset_peripherals():
"""Resets used peripherals, teardown function"""
for i in ['A', 'B', 'C', 'D', 'E', 'F']:
dev.RCC.AHBRSTR |= (1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
dev.RCC.AHBRSTR &= ~(1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
def led1():
"""Tests green LED1"""
turn_on_led('B', 1)
error = None
if not ask('Is LED1 green?'):
error = 'Green LED1 problem'
reset_peripherals()
return error
def led2():
"""Tests green LED2"""
turn_on_led('A', 8)
error = None
if not ask('Is LED2 green?'):
error = 'Green LED2 problem'
reset_peripherals()
return error
tests = [led1, led2]
run(tests)
|
[reader-revA] Add tests for LED1 and LED2"""Tests for the revA circuit board of the 'reader'.
These tests will test the expected functionality of 'reader-revA'
(https://github.com/fmfi-svt-deadlock/reader-hw/releases/tag/revA).
They are intended to be run once on new boards before the FW is loaded
and the device is deployed
"""
import mempoke
from test_sequencer import run, ask
from devices.stm32f0 import STM32F0
dev = STM32F0(mempoke.DeviceMemory())
def turn_on_led(port, pin):
"""Helper function for testing LEDs"""
dev.RCC.AHBENR |= (1 << dev.RCC.AHBENR_bits["IOP" + port + "EN"])
dev.GPIO[port].MODER |= (dev.GPIO[port].MODE_bits["OUTPUT"] << pin*2)
dev.GPIO[port].ODR |= (1 << pin)
def reset_peripherals():
"""Resets used peripherals, teardown function"""
for i in ['A', 'B', 'C', 'D', 'E', 'F']:
dev.RCC.AHBRSTR |= (1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
dev.RCC.AHBRSTR &= ~(1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
def led1():
"""Tests green LED1"""
turn_on_led('B', 1)
error = None
if not ask('Is LED1 green?'):
error = 'Green LED1 problem'
reset_peripherals()
return error
def led2():
"""Tests green LED2"""
turn_on_led('A', 8)
error = None
if not ask('Is LED2 green?'):
error = 'Green LED2 problem'
reset_peripherals()
return error
tests = [led1, led2]
run(tests)
|
<commit_before><commit_msg>[reader-revA] Add tests for LED1 and LED2<commit_after>"""Tests for the revA circuit board of the 'reader'.
These tests will test the expected functionality of 'reader-revA'
(https://github.com/fmfi-svt-deadlock/reader-hw/releases/tag/revA).
They are intended to be run once on new boards before the FW is loaded
and the device is deployed
"""
import mempoke
from test_sequencer import run, ask
from devices.stm32f0 import STM32F0
dev = STM32F0(mempoke.DeviceMemory())
def turn_on_led(port, pin):
"""Helper function for testing LEDs"""
dev.RCC.AHBENR |= (1 << dev.RCC.AHBENR_bits["IOP" + port + "EN"])
dev.GPIO[port].MODER |= (dev.GPIO[port].MODE_bits["OUTPUT"] << pin*2)
dev.GPIO[port].ODR |= (1 << pin)
def reset_peripherals():
"""Resets used peripherals, teardown function"""
for i in ['A', 'B', 'C', 'D', 'E', 'F']:
dev.RCC.AHBRSTR |= (1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
dev.RCC.AHBRSTR &= ~(1 << dev.RCC.AHBRSTR_bits["IOP" + i + "RST"])
def led1():
"""Tests green LED1"""
turn_on_led('B', 1)
error = None
if not ask('Is LED1 green?'):
error = 'Green LED1 problem'
reset_peripherals()
return error
def led2():
"""Tests green LED2"""
turn_on_led('A', 8)
error = None
if not ask('Is LED2 green?'):
error = 'Green LED2 problem'
reset_peripherals()
return error
tests = [led1, led2]
run(tests)
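Each check above follows the same enable-clock / set-mode / drive-pin pattern, so further fixtures slot straight into the tests list. A hypothetical third check — the port/pin wiring (PA9) is an assumption, not part of revA:

def led3():
    """Hypothetical check for a third LED wired to PA9."""
    turn_on_led('A', 9)
    error = None
    if not ask('Is LED3 lit?'):
        error = 'LED3 problem'
    reset_peripherals()
    return error

# Register the new check before calling run(tests).
tests.append(led3)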
|
|
c3efc9ba15fd1833474e2a59e4e266cfc6a4b9dc
|
awacs/kms.py
|
awacs/kms.py
|
from aws import Action, BaseARN
service_name = 'Amazon Key Management Service'
prefix = 'kms'
class ARN(BaseARN):
def __init__(self, resource, region='', account=''):
sup = super(ARN, self)
sup.__init__(service=prefix, resource=resource, region=region,
account=account)
CancelKeyDeletion = Action(prefix, 'CancelKeyDeletion')
CreateAlias = Action(prefix, 'CreateAlias')
CreateGrant = Action(prefix, 'CreateGrant')
CreateKey = Action(prefix, 'CreateKey')
Decrypt = Action(prefix, 'Decrypt')
DeleteAlias = Action(prefix, 'DeleteAlias')
DescribeKey = Action(prefix, 'DescribeKey')
DisableKey = Action(prefix, 'DisableKey')
DisableKeyRotation = Action(prefix, 'DisableKeyRotation')
EnableKey = Action(prefix, 'EnableKey')
EnableKeyRotation = Action(prefix, 'EnableKeyRotation')
Encrypt = Action(prefix, 'Encrypt')
GenerateDataKey = Action(prefix, 'GenerateDataKey')
GenerateDataKeyWithoutPlaintext = \
Action(prefix, 'GenerateDataKeyWithoutPlaintext')
GenerateRandom = Action(prefix, 'GenerateRandom')
GetKeyPolicy = Action(prefix, 'GetKeyPolicy')
GetKeyRotationStatus = Action(prefix, 'GetKeyRotationStatus')
ListAliases = Action(prefix, 'ListAliases')
ListGrants = Action(prefix, 'ListGrants')
ListKeyPolicies = Action(prefix, 'ListKeyPolicies')
ListKeys = Action(prefix, 'ListKeys')
ListRetirableGrants = Action(prefix, 'ListRetirableGrants')
PutKeyPolicy = Action(prefix, 'PutKeyPolicy')
ReEncrypt = Action(prefix, 'ReEncrypt')
RetireGrant = Action(prefix, 'RetireGrant')
RevokeGrant = Action(prefix, 'RevokeGrant')
ScheduleKeyDeletion = Action(prefix, 'ScheduleKeyDeletion')
UpdateAlias = Action(prefix, 'UpdateAlias')
UpdateKeyDescription = Action(prefix, 'UpdateKeyDescription')
|
Add support for key management service
|
Add support for key management service
|
Python
|
bsd-2-clause
|
cloudtools/awacs,craigbruce/awacs
|
Add support for key management service
|
from aws import Action, BaseARN
service_name = 'Amazon Key Management Service'
prefix = 'kms'
class ARN(BaseARN):
def __init__(self, resource, region='', account=''):
sup = super(ARN, self)
sup.__init__(service=prefix, resource=resource, region=region,
account=account)
CancelKeyDeletion = Action(prefix, 'CancelKeyDeletion')
CreateAlias = Action(prefix, 'CreateAlias')
CreateGrant = Action(prefix, 'CreateGrant')
CreateKey = Action(prefix, 'CreateKey')
Decrypt = Action(prefix, 'Decrypt')
DeleteAlias = Action(prefix, 'DeleteAlias')
DescribeKey = Action(prefix, 'DescribeKey')
DisableKey = Action(prefix, 'DisableKey')
DisableKeyRotation = Action(prefix, 'DisableKeyRotation')
EnableKey = Action(prefix, 'EnableKey')
EnableKeyRotation = Action(prefix, 'EnableKeyRotation')
Encrypt = Action(prefix, 'Encrypt')
GenerateDataKey = Action(prefix, 'GenerateDataKey')
GenerateDataKeyWithoutPlaintext = \
Action(prefix, 'GenerateDataKeyWithoutPlaintext')
GenerateRandom = Action(prefix, 'GenerateRandom')
GetKeyPolicy = Action(prefix, 'GetKeyPolicy')
GetKeyRotationStatus = Action(prefix, 'GetKeyRotationStatus')
ListAliases = Action(prefix, 'ListAliases')
ListGrants = Action(prefix, 'ListGrants')
ListKeyPolicies = Action(prefix, 'ListKeyPolicies')
ListKeys = Action(prefix, 'ListKeys')
ListRetirableGrants = Action(prefix, 'ListRetirableGrants')
PutKeyPolicy = Action(prefix, 'PutKeyPolicy')
ReEncrypt = Action(prefix, 'ReEncrypt')
RetireGrant = Action(prefix, 'RetireGrant')
RevokeGrant = Action(prefix, 'RevokeGrant')
ScheduleKeyDeletion = Action(prefix, 'ScheduleKeyDeletion')
UpdateAlias = Action(prefix, 'UpdateAlias')
UpdateKeyDescription = Action(prefix, 'UpdateKeyDescription')
|
<commit_before><commit_msg>Add support for key management service<commit_after>
|
from aws import Action, BaseARN
service_name = 'Amazon Key Management Service'
prefix = 'kms'
class ARN(BaseARN):
def __init__(self, resource, region='', account=''):
sup = super(ARN, self)
sup.__init__(service=prefix, resource=resource, region=region,
account=account)
CancelKeyDeletion = Action(prefix, 'CancelKeyDeletion')
CreateAlias = Action(prefix, 'CreateAlias')
CreateGrant = Action(prefix, 'CreateGrant')
CreateKey = Action(prefix, 'CreateKey')
Decrypt = Action(prefix, 'Decrypt')
DeleteAlias = Action(prefix, 'DeleteAlias')
DescribeKey = Action(prefix, 'DescribeKey')
DisableKey = Action(prefix, 'DisableKey')
DisableKeyRotation = Action(prefix, 'DisableKeyRotation')
EnableKey = Action(prefix, 'EnableKey')
EnableKeyRotation = Action(prefix, 'EnableKeyRotation')
Encrypt = Action(prefix, 'Encrypt')
GenerateDataKey = Action(prefix, 'GenerateDataKey')
GenerateDataKeyWithoutPlaintext = \
Action(prefix, 'GenerateDataKeyWithoutPlaintext')
GenerateRandom = Action(prefix, 'GenerateRandom')
GetKeyPolicy = Action(prefix, 'GetKeyPolicy')
GetKeyRotationStatus = Action(prefix, 'GetKeyRotationStatus')
ListAliases = Action(prefix, 'ListAliases')
ListGrants = Action(prefix, 'ListGrants')
ListKeyPolicies = Action(prefix, 'ListKeyPolicies')
ListKeys = Action(prefix, 'ListKeys')
ListRetirableGrants = Action(prefix, 'ListRetirableGrants')
PutKeyPolicy = Action(prefix, 'PutKeyPolicy')
ReEncrypt = Action(prefix, 'ReEncrypt')
RetireGrant = Action(prefix, 'RetireGrant')
RevokeGrant = Action(prefix, 'RevokeGrant')
ScheduleKeyDeletion = Action(prefix, 'ScheduleKeyDeletion')
UpdateAlias = Action(prefix, 'UpdateAlias')
UpdateKeyDescription = Action(prefix, 'UpdateKeyDescription')
|
Add support for key management servicefrom aws import Action, BaseARN
service_name = 'Amazon Key Management Service'
prefix = 'kms'
class ARN(BaseARN):
def __init__(self, resource, region='', account=''):
sup = super(ARN, self)
sup.__init__(service=prefix, resource=resource, region=region,
account=account)
CancelKeyDeletion = Action(prefix, 'CancelKeyDeletion')
CreateAlias = Action(prefix, 'CreateAlias')
CreateGrant = Action(prefix, 'CreateGrant')
CreateKey = Action(prefix, 'CreateKey')
Decrypt = Action(prefix, 'Decrypt')
DeleteAlias = Action(prefix, 'DeleteAlias')
DescribeKey = Action(prefix, 'DescribeKey')
DisableKey = Action(prefix, 'DisableKey')
DisableKeyRotation = Action(prefix, 'DisableKeyRotation')
EnableKey = Action(prefix, 'EnableKey')
EnableKeyRotation = Action(prefix, 'EnableKeyRotation')
Encrypt = Action(prefix, 'Encrypt')
GenerateDataKey = Action(prefix, 'GenerateDataKey')
GenerateDataKeyWithoutPlaintext = \
Action(prefix, 'GenerateDataKeyWithoutPlaintext')
GenerateRandom = Action(prefix, 'GenerateRandom')
GetKeyPolicy = Action(prefix, 'GetKeyPolicy')
GetKeyRotationStatus = Action(prefix, 'GetKeyRotationStatus')
ListAliases = Action(prefix, 'ListAliases')
ListGrants = Action(prefix, 'ListGrants')
ListKeyPolicies = Action(prefix, 'ListKeyPolicies')
ListKeys = Action(prefix, 'ListKeys')
ListRetirableGrants = Action(prefix, 'ListRetirableGrants')
PutKeyPolicy = Action(prefix, 'PutKeyPolicy')
ReEncrypt = Action(prefix, 'ReEncrypt')
RetireGrant = Action(prefix, 'RetireGrant')
RevokeGrant = Action(prefix, 'RevokeGrant')
ScheduleKeyDeletion = Action(prefix, 'ScheduleKeyDeletion')
UpdateAlias = Action(prefix, 'UpdateAlias')
UpdateKeyDescription = Action(prefix, 'UpdateKeyDescription')
|
<commit_before><commit_msg>Add support for key management service<commit_after>from aws import Action, BaseARN
service_name = 'Amazon Key Management Service'
prefix = 'kms'
class ARN(BaseARN):
def __init__(self, resource, region='', account=''):
sup = super(ARN, self)
sup.__init__(service=prefix, resource=resource, region=region,
account=account)
CancelKeyDeletion = Action(prefix, 'CancelKeyDeletion')
CreateAlias = Action(prefix, 'CreateAlias')
CreateGrant = Action(prefix, 'CreateGrant')
CreateKey = Action(prefix, 'CreateKey')
Decrypt = Action(prefix, 'Decrypt')
DeleteAlias = Action(prefix, 'DeleteAlias')
DescribeKey = Action(prefix, 'DescribeKey')
DisableKey = Action(prefix, 'DisableKey')
DisableKeyRotation = Action(prefix, 'DisableKeyRotation')
EnableKey = Action(prefix, 'EnableKey')
EnableKeyRotation = Action(prefix, 'EnableKeyRotation')
Encrypt = Action(prefix, 'Encrypt')
GenerateDataKey = Action(prefix, 'GenerateDataKey')
GenerateDataKeyWithoutPlaintext = \
Action(prefix, 'GenerateDataKeyWithoutPlaintext')
GenerateRandom = Action(prefix, 'GenerateRandom')
GetKeyPolicy = Action(prefix, 'GetKeyPolicy')
GetKeyRotationStatus = Action(prefix, 'GetKeyRotationStatus')
ListAliases = Action(prefix, 'ListAliases')
ListGrants = Action(prefix, 'ListGrants')
ListKeyPolicies = Action(prefix, 'ListKeyPolicies')
ListKeys = Action(prefix, 'ListKeys')
ListRetirableGrants = Action(prefix, 'ListRetirableGrants')
PutKeyPolicy = Action(prefix, 'PutKeyPolicy')
ReEncrypt = Action(prefix, 'ReEncrypt')
RetireGrant = Action(prefix, 'RetireGrant')
RevokeGrant = Action(prefix, 'RevokeGrant')
ScheduleKeyDeletion = Action(prefix, 'ScheduleKeyDeletion')
UpdateAlias = Action(prefix, 'UpdateAlias')
UpdateKeyDescription = Action(prefix, 'UpdateKeyDescription')
|
|
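A minimal consumption sketch for the definitions above, in the awacs style this module follows (Policy, Statement and Allow are assumed from awacs.aws; the key id, region and account number are placeholders):

from awacs.aws import Allow, Policy, Statement

# Placeholders below: substitute a real key id, region and account.
key_arn = ARN('key/1234abcd-12ab-34cd-56ef-1234567890ab',
              region='us-east-1', account='111122223333')

policy = Policy(Statement=[
    Statement(
        Effect=Allow,
        Action=[Encrypt, Decrypt, GenerateDataKey],
        Resource=[key_arn],
    ),
])
print(policy.to_json())  # actions serialize as "kms:Encrypt", "kms:Decrypt", ...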
356bba4061a6dbc70f086c59f1870e729733b062
|
build/unix/pandoc-jira.py
|
build/unix/pandoc-jira.py
|
#!/usr/bin/env python
import json
from sys import stdin
from StringIO import StringIO
def gotObj(obj):
# Replace {"t":"Str","c":"[ROOT-7392]"}
if 't' in obj and obj['t'] == 'Str' \
and obj['c'][0:6] == '[ROOT-':
# with {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
print {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
json.load(stdin, object_hook = gotObj);
|
Add pandoc filter to convert [ROOT-nnnn] into proper links.
|
Add pandoc filter to convert [ROOT-nnnn] into proper links.
(cherry picked from commit 5d48c8e572ee0bf6e9db759eb9089f0a921562b4)
|
Python
|
lgpl-2.1
|
simonpf/root,krafczyk/root,root-mirror/root,gganis/root,lgiommi/root,abhinavmoudgil95/root,root-mirror/root,jrtomps/root,lgiommi/root,thomaskeck/root,sawenzel/root,gbitzes/root,mhuwiler/rootauto,lgiommi/root,mkret2/root,lgiommi/root,georgtroska/root,georgtroska/root,mhuwiler/rootauto,BerserkerTroll/root,satyarth934/root,veprbl/root,jrtomps/root,root-mirror/root,davidlt/root,gbitzes/root,mattkretz/root,CristinaCristescu/root,karies/root,mkret2/root,CristinaCristescu/root,agarciamontoro/root,jrtomps/root,olifre/root,agarciamontoro/root,buuck/root,Y--/root,sawenzel/root,bbockelm/root,CristinaCristescu/root,georgtroska/root,pspe/root,davidlt/root,zzxuanyuan/root,mkret2/root,simonpf/root,bbockelm/root,BerserkerTroll/root,mattkretz/root,veprbl/root,gganis/root,esakellari/root,davidlt/root,thomaskeck/root,thomaskeck/root,gbitzes/root,veprbl/root,beniz/root,karies/root,bbockelm/root,esakellari/root,agarciamontoro/root,pspe/root,georgtroska/root,CristinaCristescu/root,sawenzel/root,abhinavmoudgil95/root,zzxuanyuan/root,Y--/root,esakellari/root,davidlt/root,BerserkerTroll/root,CristinaCristescu/root,krafczyk/root,krafczyk/root,esakellari/root,BerserkerTroll/root,root-mirror/root,CristinaCristescu/root,simonpf/root,olifre/root,bbockelm/root,mhuwiler/rootauto,buuck/root,simonpf/root,gbitzes/root,zzxuanyuan/root-compressor-dummy,sawenzel/root,krafczyk/root,esakellari/root,thomaskeck/root,georgtroska/root,CristinaCristescu/root,mkret2/root,mhuwiler/rootauto,olifre/root,georgtroska/root,gganis/root,thomaskeck/root,karies/root,pspe/root,root-mirror/root,satyarth934/root,beniz/root,mkret2/root,jrtomps/root,Y--/root,bbockelm/root,mattkretz/root,sawenzel/root,CristinaCristescu/root,krafczyk/root,jrtomps/root,CristinaCristescu/root,Y--/root,thomaskeck/root,simonpf/root,gganis/root,abhinavmoudgil95/root,agarciamontoro/root,root-mirror/root,Y--/root,lgiommi/root,satyarth934/root,gganis/root,davidlt/root,davidlt/root,gganis/root,mattkretz/root,BerserkerTroll/root,georgtroska/root,gbitzes/root,olifre/root,abhinavmoudgil95/root,BerserkerTroll/root,sawenzel/root,beniz/root,sawenzel/root,bbockelm/root,lgiommi/root,mkret2/root,mhuwiler/rootauto,olifre/root,mattkretz/root,pspe/root,beniz/root,gbitzes/root,jrtomps/root,mattkretz/root,beniz/root,Y--/root,agarciamontoro/root,zzxuanyuan/root,davidlt/root,BerserkerTroll/root,beniz/root,karies/root,pspe/root,zzxuanyuan/root-compressor-dummy,bbockelm/root,zzxuanyuan/root-compressor-dummy,krafczyk/root,jrtomps/root,satyarth934/root,esakellari/root,CristinaCristescu/root,satyarth934/root,mattkretz/root,satyarth934/root,mhuwiler/rootauto,gganis/root,davidlt/root,veprbl/root,thomaskeck/root,sawenzel/root,sawenzel/root,abhinavmoudgil95/root,mkret2/root,veprbl/root,simonpf/root,mhuwiler/rootauto,veprbl/root,pspe/root,buuck/root,buuck/root,davidlt/root,beniz/root,sawenzel/root,zzxuanyuan/root,buuck/root,gganis/root,karies/root,buuck/root,karies/root,satyarth934/root,zzxuanyuan/root-compressor-dummy,krafczyk/root,jrtomps/root,olifre/root,sawenzel/root,root-mirror/root,mhuwiler/rootauto,beniz/root,lgiommi/root,krafczyk/root,karies/root,zzxuanyuan/root-compressor-dummy,bbockelm/root,abhinavmoudgil95/root,krafczyk/root,pspe/root,buuck/root,simonpf/root,olifre/root,Y--/root,georgtroska/root,karies/root,zzxuanyuan/root-compressor-dummy,zzxuanyuan/root-compressor-dummy,beniz/root,zzxuanyuan/root-compressor-dummy,mhuwiler/rootauto,pspe/root,pspe/root,Y--/root,zzxuanyuan/root,karies/root,CristinaCristescu/root,veprbl/root,veprbl/root,georgtroska/root,lgiommi/root,zzxuanyuan/root,abhinavmoudg
il95/root,agarciamontoro/root,gbitzes/root,veprbl/root,zzxuanyuan/root-compressor-dummy,agarciamontoro/root,esakellari/root,karies/root,satyarth934/root,gbitzes/root,Y--/root,zzxuanyuan/root,mhuwiler/rootauto,lgiommi/root,thomaskeck/root,zzxuanyuan/root,mkret2/root,zzxuanyuan/root,esakellari/root,lgiommi/root,gganis/root,BerserkerTroll/root,mattkretz/root,zzxuanyuan/root,beniz/root,abhinavmoudgil95/root,thomaskeck/root,veprbl/root,gbitzes/root,esakellari/root,bbockelm/root,Y--/root,zzxuanyuan/root,krafczyk/root,Y--/root,gbitzes/root,buuck/root,abhinavmoudgil95/root,mkret2/root,agarciamontoro/root,lgiommi/root,georgtroska/root,beniz/root,simonpf/root,zzxuanyuan/root-compressor-dummy,BerserkerTroll/root,simonpf/root,thomaskeck/root,bbockelm/root,jrtomps/root,root-mirror/root,satyarth934/root,abhinavmoudgil95/root,olifre/root,jrtomps/root,gganis/root,BerserkerTroll/root,olifre/root,olifre/root,olifre/root,pspe/root,georgtroska/root,agarciamontoro/root,root-mirror/root,bbockelm/root,jrtomps/root,BerserkerTroll/root,buuck/root,root-mirror/root,simonpf/root,davidlt/root,buuck/root,mkret2/root,satyarth934/root,gganis/root,buuck/root,zzxuanyuan/root-compressor-dummy,simonpf/root,mattkretz/root,karies/root,veprbl/root,esakellari/root,mhuwiler/rootauto,gbitzes/root,mkret2/root,esakellari/root,root-mirror/root,davidlt/root,pspe/root,agarciamontoro/root,satyarth934/root,agarciamontoro/root,mattkretz/root,krafczyk/root,mattkretz/root,abhinavmoudgil95/root,zzxuanyuan/root
|
Add pandoc filter to convert [ROOT-nnnn] into proper links.
(cherry picked from commit 5d48c8e572ee0bf6e9db759eb9089f0a921562b4)
|
#!/usr/bin/env python
import json
from sys import stdin
from StringIO import StringIO
def gotObj(obj):
# Replace {"t":"Str","c":"[ROOT-7392]"}
if 't' in obj and obj['t'] == 'Str' \
and obj['c'][0:6] == '[ROOT-':
# with {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
print {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
json.load(stdin, object_hook = gotObj);
|
<commit_before><commit_msg>Add pandoc filter to convert [ROOT-nnnn] into proper links.
(cherry picked from commit 5d48c8e572ee0bf6e9db759eb9089f0a921562b4)<commit_after>
|
#!/usr/bin/env python
import json
from sys import stdin
from StringIO import StringIO
def gotObj(obj):
# Replace {"t":"Str","c":"[ROOT-7392]"}
if 't' in obj and obj['t'] == 'Str' \
and obj['c'][0:6] == '[ROOT-':
# with {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
print {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
json.load(stdin, object_hook = gotObj);
|
Add pandoc filter to convert [ROOT-nnnn] into proper links.
(cherry picked from commit 5d48c8e572ee0bf6e9db759eb9089f0a921562b4)#!/usr/bin/env python
import json
from sys import stdin
from StringIO import StringIO
def gotObj(obj):
# Replace {"t":"Str","c":"[ROOT-7392]"}
if 't' in obj and obj['t'] == 'Str' \
and obj['c'][0:6] == '[ROOT-':
# with {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
print {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
json.load(stdin, object_hook = gotObj);
|
<commit_before><commit_msg>Add pandoc filter to convert [ROOT-nnnn] into proper links.
(cherry picked from commit 5d48c8e572ee0bf6e9db759eb9089f0a921562b4)<commit_after>#!/usr/bin/env python
import json
from sys import stdin
from StringIO import StringIO
def gotObj(obj):
# Replace {"t":"Str","c":"[ROOT-7392]"}
if 't' in obj and obj['t'] == 'Str' \
and obj['c'][0:6] == '[ROOT-':
# with {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
print {"t":"Link","c":[[{"t":"Str","c":"ROOT-7290"}],["https://sft.its.cern.ch/jira/browse/ROOT-7290",""]]}
json.load(stdin, object_hook = gotObj);
|
|
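One caveat on the filter above: an object_hook can inspect nodes during parsing, but it never writes a transformed document back to stdout, so pandoc sees no change. A stdlib-only sketch of a working version (function names are illustrative) that walks the AST and emits modified JSON:

import json
import re
import sys

TICKET = re.compile(r'^\[(ROOT-\d+)\]$')

def to_link(node):
    # Turn {"t": "Str", "c": "[ROOT-7392]"} into the matching Link node.
    m = TICKET.match(node.get('c', '')) if node.get('t') == 'Str' else None
    if m:
        url = 'https://sft.its.cern.ch/jira/browse/' + m.group(1)
        return {'t': 'Link', 'c': [[{'t': 'Str', 'c': m.group(1)}], [url, '']]}
    return node

def walk(x):
    if isinstance(x, list):
        return [walk(i) for i in x]
    if isinstance(x, dict):
        return {k: walk(v) for k, v in to_link(x).items()}
    return x

json.dump(walk(json.load(sys.stdin)), sys.stdout)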
cf0174b72f6a7e4318b8e97f7a11fc93502c4a0f
|
command/sub8_alarm/nodes/alarm_handler.py
|
command/sub8_alarm/nodes/alarm_handler.py
|
#!/usr/bin/env python
import rospy
import json
from sub8_msgs.msg import Alarm
from std_msgs.msg import Header
from sub8_alarm import alarms
import string
class AlarmHandler(object):
def __init__(self):
'''Alarm Handler
Listen for alarms, call scenarios
TODO:
- Add alarm queue
- Handle alarms in sequence (Don't get stuck in the callback)
- bag (if set) EVERY single alarm received
'''
rospy.init_node('alarm_handler')
# Queue size is large because you bet your ass we are addressing every alarm
self.alarm_sub = rospy.Subscriber('/alarm', Alarm, self.alarm_callback, queue_size=100)
self.scenarios = {}
# Go through everything in the sub8_alarm.alarms package
for candidate_alarm_name in dir(alarms):
# Discard __* nonsense
if not candidate_alarm_name.startswith('_'):
# Verify that it is actually an alarm handler
CandidateAlarm = getattr(alarms, candidate_alarm_name)
if hasattr(CandidateAlarm, 'handle'):
self.scenarios[CandidateAlarm.alarm_name] = CandidateAlarm()
def alarm_callback(self, alarm):
time = alarm.header.stamp
if alarm.action_required:
rospy.logwarn(
"{}: {}, severity {}, handling NOW".format(
alarm.node_name, alarm.alarm_name, alarm.severity
)
)
rospy.logwarn(
"{} raised alarm of type {} of severity {} at {}".format(
alarm.node_name, alarm.alarm_name, alarm.severity, time
)
)
        scenario = self.scenarios.get(alarm.alarm_name)
        if scenario is None:
            rospy.logerr("No handler registered for alarm {}".format(alarm.alarm_name))
            return
        # Decode JSON
        parameters = json.loads(alarm.parameters)
        scenario.handle(time, parameters)
if __name__ == '__main__':
alarm_handler = AlarmHandler()
rospy.spin()
|
Add core alarm handling node
|
ALARMS: Add core alarm handling node
|
Python
|
mit
|
pemami4911/Sub8,pemami4911/Sub8,pemami4911/Sub8
|
ALARMS: Add core alarm handling node
|
#!/usr/bin/env python
import rospy
import json
from sub8_msgs.msg import Alarm
from std_msgs.msg import Header
from sub8_alarm import alarms
import string
class AlarmHandler(object):
def __init__(self):
'''Alarm Handler
Listen for alarms, call scenarios
TODO:
- Add alarm queue
- Handle alarms in sequence (Don't get stuck in the callback)
- bag (if set) EVERY single alarm received
'''
rospy.init_node('alarm_handler')
# Queue size is large because you bet your ass we are addressing every alarm
self.alarm_sub = rospy.Subscriber('/alarm', Alarm, self.alarm_callback, queue_size=100)
self.scenarios = {}
# Go through everything in the sub8_alarm.alarms package
for candidate_alarm_name in dir(alarms):
# Discard __* nonsense
if not candidate_alarm_name.startswith('_'):
# Verify that it is actually an alarm handler
CandidateAlarm = getattr(alarms, candidate_alarm_name)
if hasattr(CandidateAlarm, 'handle'):
self.scenarios[CandidateAlarm.alarm_name] = CandidateAlarm()
def alarm_callback(self, alarm):
time = alarm.header.stamp
if alarm.action_required:
rospy.logwarn(
"{}: {}, severity {}, handling NOW".format(
alarm.node_name, alarm.alarm_name, alarm.severity
)
)
rospy.logwarn(
"{} raised alarm of type {} of severity {} at {}".format(
alarm.node_name, alarm.alarm_name, alarm.severity, time
)
)
        scenario = self.scenarios.get(alarm.alarm_name)
        if scenario is None:
            rospy.logerr("No handler registered for alarm {}".format(alarm.alarm_name))
            return
        # Decode JSON
        parameters = json.loads(alarm.parameters)
        scenario.handle(time, parameters)
if __name__ == '__main__':
alarm_handler = AlarmHandler()
rospy.spin()
|
<commit_before><commit_msg>ALARMS: Add core alarm handling node<commit_after>
|
#!/usr/bin/env python
import rospy
import json
from sub8_msgs.msg import Alarm
from std_msgs.msg import Header
from sub8_alarm import alarms
import string
class AlarmHandler(object):
def __init__(self):
'''Alarm Handler
Listen for alarms, call scenarios
TODO:
- Add alarm queue
- Handle alarms in sequence (Don't get stuck in the callback)
- bag (if set) EVERY single alarm received
'''
rospy.init_node('alarm_handler')
# Queue size is large because you bet your ass we are addressing every alarm
self.alarm_sub = rospy.Subscriber('/alarm', Alarm, self.alarm_callback, queue_size=100)
self.scenarios = {}
# Go through everything in the sub8_alarm.alarms package
for candidate_alarm_name in dir(alarms):
# Discard __* nonsense
if not candidate_alarm_name.startswith('_'):
# Verify that it is actually an alarm handler
CandidateAlarm = getattr(alarms, candidate_alarm_name)
if hasattr(CandidateAlarm, 'handle'):
self.scenarios[CandidateAlarm.alarm_name] = CandidateAlarm()
def alarm_callback(self, alarm):
time = alarm.header.stamp
if alarm.action_required:
rospy.logwarn(
"{}: {}, severity {}, handling NOW".format(
alarm.node_name, alarm.alarm_name, alarm.severity
)
)
rospy.logwarn(
"{} raised alarm of type {} of severity {} at {}".format(
alarm.node_name, alarm.alarm_name, alarm.severity, time
)
)
        scenario = self.scenarios.get(alarm.alarm_name)
        if scenario is None:
            rospy.logerr("No handler registered for alarm {}".format(alarm.alarm_name))
            return
        # Decode JSON
        parameters = json.loads(alarm.parameters)
        scenario.handle(time, parameters)
if __name__ == '__main__':
alarm_handler = AlarmHandler()
rospy.spin()
|
ALARMS: Add core alarm handling node#!/usr/bin/env python
import rospy
import json
from sub8_msgs.msg import Alarm
from std_msgs.msg import Header
from sub8_alarm import alarms
import string
class AlarmHandler(object):
def __init__(self):
'''Alarm Handler
Listen for alarms, call scenarios
TODO:
- Add alarm queue
- Handle alarms in sequence (Don't get stuck in the callback)
- bag (if set) EVERY single alarm received
'''
rospy.init_node('alarm_handler')
# Queue size is large because you bet your ass we are addressing every alarm
self.alarm_sub = rospy.Subscriber('/alarm', Alarm, self.alarm_callback, queue_size=100)
self.scenarios = {}
# Go through everything in the sub8_alarm.alarms package
for candidate_alarm_name in dir(alarms):
# Discard __* nonsense
if not candidate_alarm_name.startswith('_'):
# Verify that it is actually an alarm handler
CandidateAlarm = getattr(alarms, candidate_alarm_name)
if hasattr(CandidateAlarm, 'handle'):
self.scenarios[CandidateAlarm.alarm_name] = CandidateAlarm()
def alarm_callback(self, alarm):
time = alarm.header.stamp
if alarm.action_required:
rospy.logwarn(
"{}: {}, severity {}, handling NOW".format(
alarm.node_name, alarm.alarm_name, alarm.severity
)
)
rospy.logwarn(
"{} raised alarm of type {} of severity {} at {}".format(
alarm.node_name, alarm.alarm_name, alarm.severity, time
)
)
        scenario = self.scenarios.get(alarm.alarm_name)
        if scenario is None:
            rospy.logerr("No handler registered for alarm {}".format(alarm.alarm_name))
            return
        # Decode JSON
        parameters = json.loads(alarm.parameters)
        scenario.handle(time, parameters)
if __name__ == '__main__':
alarm_handler = AlarmHandler()
rospy.spin()
|
<commit_before><commit_msg>ALARMS: Add core alarm handling node<commit_after>#!/usr/bin/env python
import rospy
import json
from sub8_msgs.msg import Alarm
from std_msgs.msg import Header
from sub8_alarm import alarms
import string
class AlarmHandler(object):
def __init__(self):
'''Alarm Handler
Listen for alarms, call scenarios
TODO:
- Add alarm queue
- Handle alarms in sequence (Don't get stuck in the callback)
- bag (if set) EVERY single alarm received
'''
rospy.init_node('alarm_handler')
# Queue size is large because you bet your ass we are addressing every alarm
self.alarm_sub = rospy.Subscriber('/alarm', Alarm, self.alarm_callback, queue_size=100)
self.scenarios = {}
# Go through everything in the sub8_alarm.alarms package
for candidate_alarm_name in dir(alarms):
# Discard __* nonsense
if not candidate_alarm_name.startswith('_'):
# Verify that it is actually an alarm handler
CandidateAlarm = getattr(alarms, candidate_alarm_name)
if hasattr(CandidateAlarm, 'handle'):
self.scenarios[CandidateAlarm.alarm_name] = CandidateAlarm()
def alarm_callback(self, alarm):
time = alarm.header.stamp
if alarm.action_required:
rospy.logwarn(
"{}: {}, severity {}, handling NOW".format(
alarm.node_name, alarm.alarm_name, alarm.severity
)
)
rospy.logwarn(
"{} raised alarm of type {} of severity {} at {}".format(
alarm.node_name, alarm.alarm_name, alarm.severity, time
)
)
        scenario = self.scenarios.get(alarm.alarm_name)
        if scenario is None:
            rospy.logerr("No handler registered for alarm {}".format(alarm.alarm_name))
            return
        # Decode JSON
        parameters = json.loads(alarm.parameters)
        scenario.handle(time, parameters)
if __name__ == '__main__':
alarm_handler = AlarmHandler()
rospy.spin()
|
|
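For context, a sketch of the producing side of this contract (the Alarm field names are inferred from how alarm_callback reads them; the message layout itself is assumed):

import json

import rospy
from sub8_msgs.msg import Alarm

def raise_alarm(pub, name, severity, params, action_required=False):
    # Mirror the fields alarm_callback reads; parameters travel as JSON.
    msg = Alarm()
    msg.header.stamp = rospy.Time.now()
    msg.node_name = rospy.get_name()
    msg.alarm_name = name
    msg.severity = severity
    msg.action_required = action_required
    msg.parameters = json.dumps(params)
    pub.publish(msg)

if __name__ == '__main__':
    rospy.init_node('alarm_demo')
    pub = rospy.Publisher('/alarm', Alarm, queue_size=10)
    rospy.sleep(1.0)  # let the subscriber connect before publishing
    raise_alarm(pub, 'thruster_out', 2, {'thruster': 3}, action_required=True)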
a1e332d8deef67d0a854471cfbb5fd3a57658e49
|
py/judge-route-circle.py
|
py/judge-route-circle.py
|
from collections import Counter
class Solution(object):
def judgeCircle(self, moves):
"""
:type moves: str
:rtype: bool
"""
c = Counter(moves)
return c['L'] == c['R'] and c['U'] == c['D']
|
Add Python solution for 657. Judge Route Circle
|
Add Python solution for 657. Judge Route Circle
657. Judge Route Circle: https://leetcode.com/problems/judge-route-circle/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add Python solution for 657. Judge Route Circle
657. Judge Route Circle: https://leetcode.com/problems/judge-route-circle/
|
from collections import Counter
class Solution(object):
def judgeCircle(self, moves):
"""
:type moves: str
:rtype: bool
"""
c = Counter(moves)
return c['L'] == c['R'] and c['U'] == c['D']
|
<commit_before><commit_msg>Add sql solution for 657. Judge Route Circle
657. Judge Route Circle: https://leetcode.com/problems/judge-route-circle/<commit_after>
|
from collections import Counter
class Solution(object):
def judgeCircle(self, moves):
"""
:type moves: str
:rtype: bool
"""
c = Counter(moves)
return c['L'] == c['R'] and c['U'] == c['D']
|
Add Python solution for 657. Judge Route Circle
657. Judge Route Circle: https://leetcode.com/problems/judge-route-circle/from collections import Counter
class Solution(object):
def judgeCircle(self, moves):
"""
:type moves: str
:rtype: bool
"""
c = Counter(moves)
return c['L'] == c['R'] and c['U'] == c['D']
|
<commit_before><commit_msg>Add sql solution for 657. Judge Route Circle
657. Judge Route Circle: https://leetcode.com/problems/judge-route-circle/<commit_after>from collections import Counter
class Solution(object):
def judgeCircle(self, moves):
"""
:type moves: str
:rtype: bool
"""
c = Counter(moves)
return c['L'] == c['R'] and c['U'] == c['D']
|
|
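A quick check of the Counter-based solution above (missing keys default to 0, so one-sided move strings compare correctly):

s = Solution()
assert s.judgeCircle("UD")          # one up, one down: back at the origin
assert not s.judgeCircle("LL")      # two lefts: displaced
assert s.judgeCircle("")            # no moves at all
assert s.judgeCircle("RRDDLLUU")    # balanced on both axes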
8c087accfcc1e5692a608162a2f8e867e8a9b7df
|
tools/update-seahub-db_0.9.4_to_0.9.5.py
|
tools/update-seahub-db_0.9.4_to_0.9.5.py
|
#!/usr/bin/env python
# encoding: utf-8
import sqlite3
conn = sqlite3.connect('/home/xiez/seahub/seahub.db')
c = conn.cursor()
# Create index
c.execute('''CREATE INDEX IF NOT EXISTS "group_groupmessage_425ae3c4" ON "group_groupmessage" ("group_id")''')
c.execute('''CREATE UNIQUE INDEX IF NOT EXISTS "contacts_contact_493fs4f1" ON "contacts_contact" ("user_email", "contact_email")''')
c.close()
|
Add update script to tools
|
Add update script to tools
|
Python
|
apache-2.0
|
madflow/seahub,madflow/seahub,Chilledheart/seahub,madflow/seahub,Chilledheart/seahub,madflow/seahub,cloudcopy/seahub,cloudcopy/seahub,miurahr/seahub,Chilledheart/seahub,miurahr/seahub,miurahr/seahub,madflow/seahub,cloudcopy/seahub,Chilledheart/seahub,Chilledheart/seahub,miurahr/seahub,cloudcopy/seahub
|
Add update script to tools
|
#!/usr/bin/env python
# encoding: utf-8
import sqlite3
conn = sqlite3.connect('/home/xiez/seahub/seahub.db')
c = conn.cursor()
# Create index
c.execute('''CREATE INDEX IF NOT EXISTS "group_groupmessage_425ae3c4" ON "group_groupmessage" ("group_id")''')
c.execute('''CREATE UNIQUE INDEX IF NOT EXISTS "contacts_contact_493fs4f1" ON "contacts_contact" ("user_email", "contact_email")''')
c.close()
|
<commit_before><commit_msg>Add update script to tools<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
import sqlite3
conn = sqlite3.connect('/home/xiez/seahub/seahub.db')
c = conn.cursor()
# Create index
c.execute('''CREATE INDEX IF NOT EXISTS "group_groupmessage_425ae3c4" ON "group_groupmessage" ("group_id")''')
c.execute('''CREATE UNIQUE INDEX IF NOT EXISTS "contacts_contact_493fs4f1" ON "contacts_contact" ("user_email", "contact_email")''')
c.close()
|
Add update script to tools#!/usr/bin/env python
# encoding: utf-8
import sqlite3
conn = sqlite3.connect('/home/xiez/seahub/seahub.db')
c = conn.cursor()
# Create index
c.execute('''CREATE INDEX IF NOT EXISTS "group_groupmessage_425ae3c4" ON "group_groupmessage" ("group_id")''')
c.execute('''CREATE UNIQUE INDEX IF NOT EXISTS "contacts_contact_493fs4f1" ON "contacts_contact" ("user_email", "contact_email")''')
c.close()
|
<commit_before><commit_msg>Add update script to tools<commit_after>#!/usr/bin/env python
# encoding: utf-8
import sqlite3
conn = sqlite3.connect('/home/xiez/seahub/seahub.db')
c = conn.cursor()
# Create index
c.execute('''CREATE INDEX IF NOT EXISTS "group_groupmessage_425ae3c4" ON "group_groupmessage" ("group_id")''')
c.execute('''CREATE UNIQUE INDEX IF NOT EXISTS "contacts_contact_493fs4f1" ON "contacts_contact" ("user_email", "contact_email")''')
c.close()
|
|
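A hedged rework of the same migration: the original hard-codes one developer's home directory and never calls commit() or closes the connection (Python's sqlite3 runs DDL in autocommit under the default isolation level, but being explicit is safer):

import sqlite3
import sys

def migrate(db_path):
    conn = sqlite3.connect(db_path)
    try:
        cur = conn.cursor()
        cur.execute('CREATE INDEX IF NOT EXISTS "group_groupmessage_425ae3c4" '
                    'ON "group_groupmessage" ("group_id")')
        cur.execute('CREATE UNIQUE INDEX IF NOT EXISTS "contacts_contact_493fs4f1" '
                    'ON "contacts_contact" ("user_email", "contact_email")')
        conn.commit()
    finally:
        conn.close()

if __name__ == '__main__':
    migrate(sys.argv[1] if len(sys.argv) > 1 else 'seahub.db')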
dcffaaf4bccee5930d7ec0cef6e81543bb4d851e
|
examples/chord_catalog.py
|
examples/chord_catalog.py
|
"""
Play all the 3-note chords on one octave, after Tom Johnson's "Chord Catalog".
Example originally by Douglas Repetto
"""
import pretty_midi
import itertools
# Our starting note/octave is middle C
base_note = 60
# Time between each chord
chord_duration = .1
# Length of each note
note_duration = chord_duration*.9
# Make a pretty midi object
pm = pretty_midi.PrettyMIDI()
# Add synth voice instrument
synth_voice = pretty_midi.instrument_name_to_program('Whistle')
pm.instruments.append(pretty_midi.Instrument(synth_voice))
# Keep track of timing
curr_time = 0.0
# All notes have velocity 100
velocity = 100
# itertools.combinations computes all pairs of items without replacement
for offset_1, offset_2 in itertools.combinations(range(1, 12), 2):
    # Build the chord: the base note plus the two interval offsets
# Notes start at curr_time and end at curr_time + note_duration
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_1, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_2, curr_time, curr_time + note_duration))
    # Advance curr_time by chord_duration
curr_time += chord_duration
midi_filename = "all_chords.mid"
pm.write(midi_filename)
|
Add MIDI file generation example
|
Add MIDI file generation example
|
Python
|
mit
|
douglaseck/pretty-midi,craffel/pretty-midi,rafaelvalle/pretty-midi,tygeng/pretty-midi
|
Add MIDI file generation example
|
"""
Play all the 3-note chords on one octave, after Tom Johnson's "Chord Catalog".
Example originally by Douglas Repetto
"""
import pretty_midi
import itertools
# Our starting note/octave is middle C
base_note = 60
# Time between each chord
chord_duration = .1
# Length of each note
note_duration = chord_duration*.9
# Make a pretty midi object
pm = pretty_midi.PrettyMIDI()
# Add synth voice instrument
synth_voice = pretty_midi.instrument_name_to_program('Whistle')
pm.instruments.append(pretty_midi.Instrument(synth_voice))
# Keep track of timing
curr_time = 0.0
# All notes have velocity 100
velocity = 100
# itertools.combinations computes all pairs of items without replacement
for offset_1, offset_2 in itertools.combinations(range(1, 12), 2):
    # Build the chord: the base note plus the two interval offsets
# Notes start at curr_time and end at curr_time + note_duration
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_1, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_2, curr_time, curr_time + note_duration))
    # Advance curr_time by chord_duration
curr_time += chord_duration
midi_filename = "all_chords.mid"
pm.write(midi_filename)
|
<commit_before><commit_msg>Add MIDI file generation example<commit_after>
|
"""
Play all the 3-note chords on one octave, after Tom Johnson's "Chord Catalog".
Example originally by Douglas Repetto
"""
import pretty_midi
import itertools
# Our starting note/octave is middle C
base_note = 60
# Time between each chord
chord_duration = .1
# Length of each note
note_duration = chord_duration*.9
# Make a pretty midi object
pm = pretty_midi.PrettyMIDI()
# Add synth voice instrument
synth_voice = pretty_midi.instrument_name_to_program('Whistle')
pm.instruments.append(pretty_midi.Instrument(synth_voice))
# Keep track of timing
curr_time = 0.0
# All notes have velocity 100
velocity = 100
# itertools.combinations computes all pairs of items without replacement
for offset_1, offset_2 in itertools.combinations(range(1, 12), 2):
    # Build the chord: the base note plus the two interval offsets
# Notes start at curr_time and end at curr_time + note_duration
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_1, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_2, curr_time, curr_time + note_duration))
    # Advance curr_time by chord_duration
curr_time += chord_duration
midi_filename = "all_chords.mid"
pm.write(midi_filename)
|
Add MIDI file generation example"""
Play all the 3-note chords on one octave, after Tom Johnson's "Chord Catalog".
Example originally by Douglas Repetto
"""
import pretty_midi
import itertools
# Our starting note/octave is middle C
base_note = 60
# Time between each chord
chord_duration = .1
# Length of each note
note_duration = chord_duration*.9
# Make a pretty midi object
pm = pretty_midi.PrettyMIDI()
# Add synth voice instrument
synth_voice = pretty_midi.instrument_name_to_program('Whistle')
pm.instruments.append(pretty_midi.Instrument(synth_voice))
# Keep track of timing
curr_time = 0.0
# All notes have velocity 100
velocity = 100
# itertools.combinations computes all pairs of items without replacement
for offset_1, offset_2 in itertools.combinations(range(1, 12), 2):
    # Build the chord: the base note plus the two interval offsets
# Notes start at curr_time and end at curr_time + note_duration
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_1, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_2, curr_time, curr_time + note_duration))
    # Advance curr_time by chord_duration
curr_time += chord_duration
midi_filename = "all_chords.mid"
pm.write(midi_filename)
|
<commit_before><commit_msg>Add MIDI file generation example<commit_after>"""
Play all the 3-note chords on one octave, after Tom Johnson's "Chord Catalog".
Example originally by Douglas Repetto
"""
import pretty_midi
import itertools
# Our starting note/octave is middle C
base_note = 60
# Time between each chord
chord_duration = .1
# Length of each note
note_duration = chord_duration*.9
# Make a pretty midi object
pm = pretty_midi.PrettyMIDI()
# Add synth voice instrument
synth_voice = pretty_midi.instrument_name_to_program('Whistle')
pm.instruments.append(pretty_midi.Instrument(synth_voice))
# Keep track of timing
curr_time = 0.0
# All notes have velocity 100
velocity = 100
# itertools.combinations computes all pairs of items without replacement
for offset_1, offset_2 in itertools.combinations(range(1, 12), 2):
    # Build the chord: the base note plus the two interval offsets
# Notes start at curr_time and end at curr_time + note_duration
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_1, curr_time, curr_time + note_duration))
pm.instruments[0].notes.append(pretty_midi.Note(
velocity, base_note + offset_2, curr_time, curr_time + note_duration))
    # Advance curr_time by chord_duration
curr_time += chord_duration
midi_filename = "all_chords.mid"
pm.write(midi_filename)
|
|
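A quick sanity check of the combinatorics above: itertools.combinations(range(1, 12), 2) yields C(11, 2) = 55 offset pairs, so the piece is 55 chords, about 5.5 seconds long:

import itertools

pairs = list(itertools.combinations(range(1, 12), 2))
print(len(pairs))          # 55 distinct 3-note chords within the octave
print(len(pairs) * 0.1)    # ~5.5 seconds at chord_duration = 0.1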
1e4a2b9c9898de4f8e98ac09179afdd7a47f8508
|
demo_data.py
|
demo_data.py
|
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Business, Category, Account, Transaction, AccountUser
engine = create_engine('sqlite:///bam.db')
Base.metadata.bind = engine
Base.metadata.drop_all(engine) # Drop all existing tables
Base.metadata.create_all(engine) # Create new tables
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Create users
user1 = User(username="demo", password="demo", salt="tba")
session.add(user1)
session.commit()
user2 = User(username="normal", password="normal", salt="tba")
session.add(user2)
session.commit()
# Create businesses
business1 = Business(busname="Acme", username="demo")
session.add(business1)
session.commit()
business2 = Business(busname="Caltex", username="demo")
session.add(business2)
session.commit()
|
Add file that was omitted from previous commit
|
Add file that was omitted from previous commit
|
Python
|
unknown
|
gregcowell/BAM,gregcowell/PFT,gregcowell/PFT,gregcowell/BAM
|
Add file that was omitted from previous commit
|
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Business, Category, Account, Transaction, AccountUser
engine = create_engine('sqlite:///bam.db')
Base.metadata.bind = engine
Base.metadata.drop_all(engine) # Drop all existing tables
Base.metadata.create_all(engine) # Create new tables
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Create users
user1 = User(username="demo", password="demo", salt="tba")
session.add(user1)
session.commit()
user2 = User(username="normal", password="normal", salt="tba")
session.add(user2)
session.commit()
# Create businesses
business1 = Business(busname="Acme", username="demo")
session.add(business1)
session.commit()
business2 = Business(busname="Caltex", username="demo")
session.add(business2)
session.commit()
|
<commit_before><commit_msg>Add file that was omitted from previous commit<commit_after>
|
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Business, Category, Account, Transaction, AccountUser
engine = create_engine('sqlite:///bam.db')
Base.metadata.bind = engine
Base.metadata.drop_all(engine) # Drop all existing tables
Base.metadata.create_all(engine) # Create new tables
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Create users
user1 = User(username="demo", password="demo", salt="tba")
session.add(user1)
session.commit()
user2 = User(username="normal", password="normal", salt="tba")
session.add(user2)
session.commit()
# Create businesses
business1 = Business(busname="Acme", username="demo")
session.add(business1)
session.commit()
business2 = Business(busname="Caltex", username="demo")
session.add(business2)
session.commit()
|
Add file that was omitted from previous commitfrom sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Business, Category, Account, Transaction, AccountUser
engine = create_engine('sqlite:///bam.db')
Base.metadata.bind = engine
Base.metadata.drop_all(engine) # Drop all existing tables
Base.metadata.create_all(engine) # Create new tables
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Create users
user1 = User(username="demo", password="demo", salt="tba")
session.add(user1)
session.commit()
user2 = User(username="normal", password="normal", salt="tba")
session.add(user2)
session.commit()
# Create businesses
business1 = Business(busname="Acme", username="demo")
session.add(business1)
session.commit()
business2 = Business(busname="Caltex", username="demo")
session.add(business2)
session.commit()
|
<commit_before><commit_msg>Add file that was omitted from previous commit<commit_after>from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Business, Category, Account, Transaction, AccountUser
engine = create_engine('sqlite:///bam.db')
Base.metadata.bind = engine
Base.metadata.drop_all(engine) # Drop all existing tables
Base.metadata.create_all(engine) # Create new tables
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Create users
user1 = User(username="demo", password="demo", salt="tba")
session.add(user1)
session.commit()
user2 = User(username="normal", password="normal", salt="tba")
session.add(user2)
session.commit()
# Create businesses
business1 = Business(busname="Acme", username="demo")
session.add(business1)
session.commit()
business2 = Business(busname="Caltex", username="demo")
session.add(business2)
session.commit()
|
|
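Reading the demo rows back confirms the inserts (model and field names are taken from the script above):

for business in session.query(Business).filter_by(username="demo"):
    print(business.busname)         # Acme, Caltex
print(session.query(User).count())  # 2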
e411edf21bc13518fb1dba8f394fb4d0ed7ed60d
|
scripts/monitor_yatsm.py
|
scripts/monitor_yatsm.py
|
#!/usr/bin/env python
""" Near real-time monitoring
Usage:
monitor_yatsm.py [options] <config_file> <job_number> <total_jobs>
Options:
--resume Do not overwrite pre-existing results
-v --verbose Show verbose debugging messages
--version Print program version and exit
-h --help Show help
"""
|
Add prototype CLI to monitoring script
|
Add prototype CLI to monitoring script
|
Python
|
mit
|
valpasq/yatsm,valpasq/yatsm,c11/yatsm,jmorton/yatsm,c11/yatsm,jmorton/yatsm,ceholden/yatsm,jmorton/yatsm,ceholden/yatsm
|
Add prototype CLI to monitoring script
|
#!/usr/bin/env python
""" Near real-time monitoring
Usage:
monitor_yatsm.py [options] <config_file> <job_number> <total_jobs>
Options:
--resume Do not overwrite pre-existing results
-v --verbose Show verbose debugging messages
--version Print program version and exit
-h --help Show help
"""
|
<commit_before><commit_msg>Add prototype CLI to monitoring script<commit_after>
|
#!/usr/bin/env python
""" Near real-time monitoring
Usage:
monitor_yatsm.py [options] <config_file> <job_number> <total_jobs>
Options:
--resume Do not overwrite pre-existing results
-v --verbose Show verbose debugging messages
--version Print program version and exit
-h --help Show help
"""
|
Add prototype CLI to monitoring script#!/usr/bin/env python
""" Near real-time monitoring
Usage:
monitor_yatsm.py [options] <config_file> <job_number> <total_jobs>
Options:
--resume Do not overwrite pre-existing results
-v --verbose Show verbose debugging messages
--version Print program version and exit
-h --help Show help
"""
|
<commit_before><commit_msg>Add prototype CLI to monitoring script<commit_after>#!/usr/bin/env python
""" Near real-time monitoring
Usage:
monitor_yatsm.py [options] <config_file> <job_number> <total_jobs>
Options:
--resume Do not overwrite pre-existing results
-v --verbose Show verbose debugging messages
--version Print program version and exit
-h --help Show help
"""
|
|
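The docstring above is written in docopt's usage notation; a sketch of how such a script typically parses it (docopt is assumed here as the parser, since only the docstring shipped in this commit):

from docopt import docopt

if __name__ == '__main__':
    args = docopt(__doc__, version='monitor_yatsm.py 0.1.0')
    config_file = args['<config_file>']
    job_number = int(args['<job_number>'])
    total_jobs = int(args['<total_jobs>'])
    if args['--verbose']:
        print('Monitoring with %s (job %d of %d)' %
              (config_file, job_number, total_jobs))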
6fcc2bff61aad25d9f2f1da93c1e1cc6390c19b7
|
scripts/motion_sensor.py
|
scripts/motion_sensor.py
|
import time
import RPi.GPIO as GPIO
from blink2 import Blink
GPIO.setmode(GPIO.BCM)
pir_pin = 23
GPIO.setup(pir_pin, GPIO.IN) # activate input
i = 0
while True:
if GPIO.input(pir_pin):
i += 1
print("%i PIR ALARM!" % i)
Blink(27, 3, 0.3)
GPIO.setmode(GPIO.BCM)
GPIO.setup(pir_pin, GPIO.IN)
# time.sleep(.5)
|
Add motion sensor python script.
|
Add motion sensor python script.
|
Python
|
bsd-3-clause
|
kbsezginel/raspberry-pi,kbsezginel/raspberry-pi,kbsezginel/raspberry-pi,kbsezginel/raspberry-pi
|
Add motion sensor python script.
|
import time
import RPi.GPIO as GPIO
from blink2 import Blink
GPIO.setmode(GPIO.BCM)
pir_pin = 23
GPIO.setup(pir_pin, GPIO.IN) # activate input
i = 0
while True:
if GPIO.input(pir_pin):
i += 1
print("%i PIR ALARM!" % i)
Blink(27, 3, 0.3)
GPIO.setmode(GPIO.BCM)
GPIO.setup(pir_pin, GPIO.IN)
# time.sleep(.5)
|
<commit_before><commit_msg>Add motion sensor python script.<commit_after>
|
import time
import RPi.GPIO as GPIO
from blink2 import Blink
GPIO.setmode(GPIO.BCM)
pir_pin = 23
GPIO.setup(pir_pin, GPIO.IN) # activate input
i = 0
while True:
if GPIO.input(pir_pin):
i += 1
print("%i PIR ALARM!" % i)
Blink(27, 3, 0.3)
GPIO.setmode(GPIO.BCM)
GPIO.setup(pir_pin, GPIO.IN)
# time.sleep(.5)
|
Add motion sensor python script.import time
import RPi.GPIO as GPIO
from blink2 import Blink
GPIO.setmode(GPIO.BCM)
pir_pin = 23
GPIO.setup(pir_pin, GPIO.IN) # activate input
i = 0
while True:
if GPIO.input(pir_pin):
i += 1
print("%i PIR ALARM!" % i)
Blink(27, 3, 0.3)
GPIO.setmode(GPIO.BCM)
GPIO.setup(pir_pin, GPIO.IN)
# time.sleep(.5)
|
<commit_before><commit_msg>Add motion sensor python script.<commit_after>import time
import RPi.GPIO as GPIO
from blink2 import Blink
GPIO.setmode(GPIO.BCM)
pir_pin = 23
GPIO.setup(pir_pin, GPIO.IN) # activate input
i = 0
while True:
if GPIO.input(pir_pin):
i += 1
print("%i PIR ALARM!" % i)
Blink(27, 3, 0.3)
GPIO.setmode(GPIO.BCM)
GPIO.setup(pir_pin, GPIO.IN)
# time.sleep(.5)
|
|
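An event-driven alternative to the busy loop above, using RPi.GPIO's standard interrupt support (the project's Blink helper is replaced with a plain print):

import time

import RPi.GPIO as GPIO

PIR_PIN = 23

GPIO.setmode(GPIO.BCM)
GPIO.setup(PIR_PIN, GPIO.IN)

def on_motion(channel):
    print("PIR ALARM on GPIO %d!" % channel)

# Run the callback on each rising edge; bouncetime (ms) debounces retriggers.
GPIO.add_event_detect(PIR_PIN, GPIO.RISING, callback=on_motion, bouncetime=300)

try:
    while True:
        time.sleep(1)  # main thread idles; callbacks fire on GPIO's own thread
except KeyboardInterrupt:
    GPIO.cleanup()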
9a27a0699127be1a6b2770e6ffcbfa22a2f72e32
|
_tests/test_item.py
|
_tests/test_item.py
|
from unittest import TestCase
from classes.item import Item
class TestItem(TestCase):
def setUp(self):
self.item = Item("name")
def tearDown(self):
self.item = None
def test_item_id_is_int(self):
self.assertIsInstance(self.item.id, int)
def test_item_name_is_str(self):
self.assertIsInstance(self.item.name, str)
def test_update_item_without_name(self):
self.assertTrue(self.item.update(None), "Item must have a name")
def test_update_item_with_invalid_name(self):
self.assertTrue(self.item.update([]), "Item name must be a string")
def test_update_item(self):
self.item.update("new name")
self.assertEqual(
self.item.name,
"new name",
msg="Method update should update the items name"
)
|
Create test-cases for class item
|
[TESTS] Create test-cases for class item
|
Python
|
mit
|
EinsteinCarrey/Shoppinglist,EinsteinCarrey/Shoppinglist,EinsteinCarrey/Shoppinglist
|
[TESTS] Create test-cases for class item
|
from unittest import TestCase
from classes.item import Item
class TestItem(TestCase):
def setUp(self):
self.item = Item("name")
def tearDown(self):
self.item = None
def test_item_id_is_int(self):
self.assertIsInstance(self.item.id, int)
def test_item_name_is_str(self):
self.assertIsInstance(self.item.name, str)
def test_update_item_without_name(self):
self.assertTrue(self.item.update(None), "Item must have a name")
def test_update_item_with_invalid_name(self):
self.assertTrue(self.item.update([]), "Item name must be a string")
def test_update_item(self):
self.item.update("new name")
self.assertEqual(
self.item.name,
"new name",
msg="Method update should update the items name"
)
|
<commit_before><commit_msg>[TESTS] Create test-cases for class item<commit_after>
|
from unittest import TestCase
from classes.item import Item
class TestItem(TestCase):
def setUp(self):
self.item = Item("name")
def tearDown(self):
self.item = None
def test_item_id_is_int(self):
self.assertIsInstance(self.item.id, int)
def test_item_name_is_str(self):
self.assertIsInstance(self.item.name, str)
def test_update_item_without_name(self):
self.assertTrue(self.item.update(None), "Item must have a name")
def test_update_item_with_invalid_name(self):
self.assertTrue(self.item.update([]), "Item name must be a string")
def test_update_item(self):
self.item.update("new name")
self.assertEqual(
self.item.name,
"new name",
msg="Method update should update the items name"
)
|
[TESTS] Create test-cases for class itemfrom unittest import TestCase
from classes.item import Item
class TestItem(TestCase):
def setUp(self):
self.item = Item("name")
def tearDown(self):
self.item = None
def test_item_id_is_int(self):
self.assertIsInstance(self.item.id, int)
def test_item_name_is_str(self):
self.assertIsInstance(self.item.name, str)
def test_update_item_without_name(self):
self.assertTrue(self.item.update(None), "Item must have a name")
def test_update_item_with_invalid_name(self):
self.assertTrue(self.item.update([]), "Item name must be a string")
def test_update_item(self):
self.item.update("new name")
self.assertEqual(
self.item.name,
"new name",
msg="Method update should update the items name"
)
|
<commit_before><commit_msg>[TESTS] Create test-cases for class item<commit_after>from unittest import TestCase
from classes.item import Item
class TestItem(TestCase):
def setUp(self):
self.item = Item("name")
def tearDown(self):
self.item = None
def test_item_id_is_int(self):
self.assertIsInstance(self.item.id, int)
def test_item_name_is_str(self):
self.assertIsInstance(self.item.name, str)
def test_update_item_without_name(self):
self.assertTrue(self.item.update(None), "Item must have a name")
def test_update_item_with_invalid_name(self):
self.assertTrue(self.item.update([]), "Item name must be a string")
def test_update_item(self):
self.item.update("new name")
self.assertEqual(
self.item.name,
"new name",
msg="Method update should update the items name"
)
|
|
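One caveat about the two validation tests above: assertTrue's second argument is only a failure message, so they pass whenever update() returns anything truthy. If invalid names are meant to be rejected, assertRaises states that directly; this sketch assumes Item.update raises TypeError on bad input, which the class would need to implement:

# Inside the TestCase above (sketch):
def test_update_item_rejects_invalid_names(self):
    with self.assertRaises(TypeError):  # assumed behavior, see note above
        self.item.update(None)
    with self.assertRaises(TypeError):
        self.item.update([])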
92de07a7f959df2a4ad7d3dfcb1e9fb8af890fc9
|
openedx/core/djangoapps/content/block_structure/migrations/0005_trim_leading_slashes_in_data_path.py
|
openedx/core/djangoapps/content/block_structure/migrations/0005_trim_leading_slashes_in_data_path.py
|
"""
Data migration to convert absolute paths in block_structure.data to be relative.
This has only been tested with MySQL, though it should also work for Postgres as
well. This is necessary to manually correct absolute paths in the "data" field
of the block_structure table. For S3 storage, having a path that starts with
"/courses/" puts things in the same place as a path starting with "courses/",
but absolute paths are not permitted for FileFields.
These values would have always been broken in devstack (because it's not in
MEDIA_ROOT), but it used to work for the S3 storages option because the security
checking happened at the storage layer, and the path is equivalent in S3 because
we just append either value to the bucket's root.
However, in Django > 2.2.20, this checking against absolute paths has been added
to the FileField itself, and an upgrade attempt started causing write failures
to Block Structures.
There are separate PRs to fix the config values so that new writes start with a
"courses/" prefix. This migration to is fix old entries by removing any leading
"/" characters.
THIS MIGRATION MUST BE RUN BEFORE UPGRADING TO DJANGO > 2.2.20 IF YOU ARE
USING A STORAGE_CLASS IN BLOCK_STRUCTURES_SETTINGS. If you do not specify this
setting and only run Block Structures out of memcached, this should not affect
you.
"""
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('block_structure', '0004_blockstructuremodel_usagekeywithrun'),
]
operations = [
migrations.RunSQL(
"""
UPDATE block_structure
SET data = right(data, length(data) - 1)
WHERE data like '/%';
"""
)
]
|
Convert block_structure.data to relative paths (TNL-8335)
|
fix: Convert block_structure.data to relative paths (TNL-8335)
In order to upgrade to Django > 2.2.20, we can't continue to use
absolute paths in the block_structure's data FileField. This used to
work for S3, but it will not work going forward due to a security fix
in Django 2.2.21.
This data migration will remove the starting '/' from any paths in the
block_structure table. The resulting locations in S3 should be
unaffected.
|
Python
|
agpl-3.0
|
edx/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,edx/edx-platform,angelapper/edx-platform,edx/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,edx/edx-platform,arbrandes/edx-platform,angelapper/edx-platform
|
fix: Convert block_structure.data to relative paths (TNL-8335)
In order to upgrade to Django > 2.2.20, we can't continue to use
absolute paths in the block_structure's data FileField. This used to
work for S3, but it will not work going forward due to a security fix
in Django 2.2.21.
This data migration will remove the starting '/' from any paths in the
block_structure table. The resulting locations in S3 should be
unaffected.
|
"""
Data migration to convert absolute paths in block_structure.data to be relative.
This has only been tested with MySQL, though it should also work for Postgres as
well. This is necessary to manually correct absolute paths in the "data" field
of the block_structure table. For S3 storage, having a path that starts with
"/courses/" puts things in the same place as a path starting with "courses/",
but absolute paths are not permitted for FileFields.
These values would have always been broken in devstack (because it's not in
MEDIA_ROOT), but it used to work for the S3 storages option because the security
checking happened at the storage layer, and the path is equivalent in S3 because
we just append either value to the bucket's root.
However, in Django > 2.2.20, this checking against absolute paths has been added
to the FileField itself, and an upgrade attempt started causing write failures
to Block Structures.
There are separate PRs to fix the config values so that new writes start with a
"courses/" prefix. This migration to is fix old entries by removing any leading
"/" characters.
THIS MIGRATION MUST BE RUN BEFORE UPGRADING TO DJANGO > 2.2.20 IF YOU ARE
USING A STORAGE_CLASS IN BLOCK_STRUCTURES_SETTINGS. If you do not specify this
setting and only run Block Structures out of memcached, this should not affect
you.
"""
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('block_structure', '0004_blockstructuremodel_usagekeywithrun'),
]
operations = [
migrations.RunSQL(
"""
UPDATE block_structure
SET data = right(data, length(data) - 1)
WHERE data like '/%';
"""
)
]
|
<commit_before><commit_msg>fix: Convert block_structure.data to relative paths (TNL-8335)
In order to upgrade to Django > 2.2.20, we can't continue to use
absolute paths in the block_structure's data FileField. This used to
work for S3, but it will not work going forward due to a security fix
in Django 2.2.21.
This data migration will remove the starting '/' from any paths in the
block_structure table. The resulting locations in S3 should be
unaffected.<commit_after>
|
"""
Data migration to convert absolute paths in block_structure.data to be relative.
This has only been tested with MySQL, though it should also work for Postgres as
well. This is necessary to manually correct absolute paths in the "data" field
of the block_structure table. For S3 storage, having a path that starts with
"/courses/" puts things in the same place as a path starting with "courses/",
but absolute paths are not permitted for FileFields.
These values would have always been broken in devstack (because it's not in
MEDIA_ROOT), but it used to work for the S3 storages option because the security
checking happened at the storage layer, and the path is equivalent in S3 because
we just append either value to the bucket's root.
However, in Django > 2.2.20, this checking against absolute paths has been added
to the FileField itself, and an upgrade attempt started causing write failures
to Block Structures.
There are separate PRs to fix the config values so that new writes start with a
"courses/" prefix. This migration to is fix old entries by removing any leading
"/" characters.
THIS MIGRATION MUST BE RUN BEFORE UPGRADING TO DJANGO > 2.2.20 IF YOU ARE
USING A STORAGE_CLASS IN BLOCK_STRUCTURES_SETTINGS. If you do not specify this
setting and only run Block Structures out of memcached, this should not affect
you.
"""
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('block_structure', '0004_blockstructuremodel_usagekeywithrun'),
]
operations = [
migrations.RunSQL(
"""
UPDATE block_structure
SET data = right(data, length(data) - 1)
WHERE data like '/%';
"""
)
]
|
fix: Convert block_structure.data to relative paths (TNL-8335)
In order to upgrade to Django > 2.2.20, we can't continue to use
absolute paths in the block_structure's data FileField. This used to
work for S3, but it will not work going forward due to a security fix
in Django 2.2.21.
This data migration will remove the starting '/' from any paths in the
block_structure table. The resulting locations in S3 should be
unaffected."""
Data migration to convert absolute paths in block_structure.data to be relative.
This has only been tested with MySQL, though it should also work for Postgres as
well. This is necessary to manually correct absolute paths in the "data" field
of the block_structure table. For S3 storage, having a path that starts with
"/courses/" puts things in the same place as a path starting with "courses/",
but absolute paths are not permitted for FileFields.
These values would have always been broken in devstack (because it's not in
MEDIA_ROOT), but it used to work for the S3 storages option because the security
checking happened at the storage layer, and the path is equivalent in S3 because
we just append either value to the bucket's root.
However, in Django > 2.2.20, this checking against absolute paths has been added
to the FileField itself, and an upgrade attempt started causing write failures
to Block Structures.
There are separate PRs to fix the config values so that new writes start with a
"courses/" prefix. This migration to is fix old entries by removing any leading
"/" characters.
THIS MIGRATION MUST BE RUN BEFORE UPGRADING TO DJANGO > 2.2.20 IF YOU ARE
USING A STORAGE_CLASS IN BLOCK_STRUCTURES_SETTINGS. If you do not specify this
setting and only run Block Structures out of memcached, this should not affect
you.
"""
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('block_structure', '0004_blockstructuremodel_usagekeywithrun'),
]
operations = [
migrations.RunSQL(
"""
UPDATE block_structure
SET data = right(data, length(data) - 1)
WHERE data like '/%';
"""
)
]
|
<commit_before><commit_msg>fix: Convert block_structure.data to relative paths (TNL-8335)
In order to upgrade to Django > 2.2.20, we can't continue to use
absolute paths in the block_structure's data FileField. This used to
work for S3, but it will not work going forward due to a security fix
in Django 2.2.21.
This data migration will remove the starting '/' from any paths in the
block_structure table. The resulting locations in S3 should be
unaffected.<commit_after>"""
Data migration to convert absolute paths in block_structure.data to be relative.
This has only been tested with MySQL, though it should also work for Postgres as
well. This is necessary to manually correct absolute paths in the "data" field
of the block_structure table. For S3 storage, having a path that starts with
"/courses/" puts things in the same place as a path starting with "courses/",
but absolute paths are not permitted for FileFields.
These values would have always been broken in devstack (because it's not in
MEDIA_ROOT), but it used to work for the S3 storages option because the security
checking happened at the storage layer, and the path is equivalent in S3 because
we just append either value to the bucket's root.
However, in Django > 2.2.20, this checking against absolute paths has been added
to the FileField itself, and an upgrade attempt started causing write failures
to Block Structures.
There are separate PRs to fix the config values so that new writes start with a
"courses/" prefix. This migration to is fix old entries by removing any leading
"/" characters.
THIS MIGRATION MUST BE RUN BEFORE UPGRADING TO DJANGO > 2.2.20 IF YOU ARE
USING A STORAGE_CLASS IN BLOCK_STRUCTURES_SETTINGS. If you do not specify this
setting and only run Block Structures out of memcached, this should not affect
you.
"""
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('block_structure', '0004_blockstructuremodel_usagekeywithrun'),
]
operations = [
migrations.RunSQL(
"""
UPDATE block_structure
SET data = right(data, length(data) - 1)
WHERE data like '/%';
"""
)
]
|
|
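An equivalent ORM-based sketch using RunPython, which some deployments prefer over raw SQL because it stays database-agnostic (illustrative only, not the shipped migration):

from django.db import migrations

def strip_leading_slashes(apps, schema_editor):
    BlockStructureModel = apps.get_model('block_structure', 'BlockStructureModel')
    for row in BlockStructureModel.objects.filter(data__startswith='/').iterator():
        row.data = str(row.data).lstrip('/')  # FileField accepts a plain name string
        row.save(update_fields=['data'])

class Migration(migrations.Migration):
    dependencies = [('block_structure', '0004_blockstructuremodel_usagekeywithrun')]
    operations = [migrations.RunPython(strip_leading_slashes,
                                       migrations.RunPython.noop)]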
ed729218148a2700093bc17ffd7e0b10400b9443
|
alembic/versions/f96f78987d58_added_self_referential_image_parent.py
|
alembic/versions/f96f78987d58_added_self_referential_image_parent.py
|
"""Added self-referential image parent
Revision ID: f96f78987d58
Revises: 698cc06661d6
Create Date: 2016-03-20 19:21:05.651929
"""
# revision identifiers, used by Alembic.
revision = 'f96f78987d58'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('images', sa.Column('parent_image_id', sa.Integer(), nullable=True))
op.create_index(op.f('ix_images_imbo_id'), 'images', ['imbo_id'], unique=False)
op.create_index(op.f('ix_images_parent_image_id'), 'images', ['parent_image_id'], unique=False)
op.create_foreign_key(None, 'images', 'images', ['parent_image_id'], ['id'])
op.create_index(op.f('ix_pages_sections_layout_settings_page_section_id'), 'pages_sections_layout_settings', ['page_section_id'], unique=False)
op.drop_index('ix_sites_settings_site_id', table_name='sites_settings')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_sites_settings_site_id', 'sites_settings', ['site_id'], unique=False)
op.drop_index(op.f('ix_pages_sections_layout_settings_page_section_id'), table_name='pages_sections_layout_settings')
op.drop_constraint(None, 'images', type_='foreignkey')
op.drop_index(op.f('ix_images_parent_image_id'), table_name='images')
op.drop_index(op.f('ix_images_imbo_id'), table_name='images')
op.drop_column('images', 'parent_image_id')
### end Alembic commands ###
|
Add DB migration for self referential images
|
Add DB migration for self referential images
|
Python
|
mit
|
matslindh/kimochi,matslindh/kimochi
|
Add DB migration for self referential images
|
"""Added self-referential image parent
Revision ID: f96f78987d58
Revises: 698cc06661d6
Create Date: 2016-03-20 19:21:05.651929
"""
# revision identifiers, used by Alembic.
revision = 'f96f78987d58'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('images', sa.Column('parent_image_id', sa.Integer(), nullable=True))
op.create_index(op.f('ix_images_imbo_id'), 'images', ['imbo_id'], unique=False)
op.create_index(op.f('ix_images_parent_image_id'), 'images', ['parent_image_id'], unique=False)
op.create_foreign_key(None, 'images', 'images', ['parent_image_id'], ['id'])
op.create_index(op.f('ix_pages_sections_layout_settings_page_section_id'), 'pages_sections_layout_settings', ['page_section_id'], unique=False)
op.drop_index('ix_sites_settings_site_id', table_name='sites_settings')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_sites_settings_site_id', 'sites_settings', ['site_id'], unique=False)
op.drop_index(op.f('ix_pages_sections_layout_settings_page_section_id'), table_name='pages_sections_layout_settings')
op.drop_constraint(None, 'images', type_='foreignkey')
op.drop_index(op.f('ix_images_parent_image_id'), table_name='images')
op.drop_index(op.f('ix_images_imbo_id'), table_name='images')
op.drop_column('images', 'parent_image_id')
### end Alembic commands ###
|
<commit_before><commit_msg>Add DB migration for self referential images<commit_after>
|
"""Added self-referential image parent
Revision ID: f96f78987d58
Revises: 698cc06661d6
Create Date: 2016-03-20 19:21:05.651929
"""
# revision identifiers, used by Alembic.
revision = 'f96f78987d58'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('images', sa.Column('parent_image_id', sa.Integer(), nullable=True))
op.create_index(op.f('ix_images_imbo_id'), 'images', ['imbo_id'], unique=False)
op.create_index(op.f('ix_images_parent_image_id'), 'images', ['parent_image_id'], unique=False)
op.create_foreign_key(None, 'images', 'images', ['parent_image_id'], ['id'])
op.create_index(op.f('ix_pages_sections_layout_settings_page_section_id'), 'pages_sections_layout_settings', ['page_section_id'], unique=False)
op.drop_index('ix_sites_settings_site_id', table_name='sites_settings')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_sites_settings_site_id', 'sites_settings', ['site_id'], unique=False)
op.drop_index(op.f('ix_pages_sections_layout_settings_page_section_id'), table_name='pages_sections_layout_settings')
op.drop_constraint(None, 'images', type_='foreignkey')
op.drop_index(op.f('ix_images_parent_image_id'), table_name='images')
op.drop_index(op.f('ix_images_imbo_id'), table_name='images')
op.drop_column('images', 'parent_image_id')
### end Alembic commands ###
|
Add DB migration for self referential images"""Added self-referential image parent
Revision ID: f96f78987d58
Revises: 698cc06661d6
Create Date: 2016-03-20 19:21:05.651929
"""
# revision identifiers, used by Alembic.
revision = 'f96f78987d58'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('images', sa.Column('parent_image_id', sa.Integer(), nullable=True))
op.create_index(op.f('ix_images_imbo_id'), 'images', ['imbo_id'], unique=False)
op.create_index(op.f('ix_images_parent_image_id'), 'images', ['parent_image_id'], unique=False)
op.create_foreign_key(None, 'images', 'images', ['parent_image_id'], ['id'])
op.create_index(op.f('ix_pages_sections_layout_settings_page_section_id'), 'pages_sections_layout_settings', ['page_section_id'], unique=False)
op.drop_index('ix_sites_settings_site_id', table_name='sites_settings')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_sites_settings_site_id', 'sites_settings', ['site_id'], unique=False)
op.drop_index(op.f('ix_pages_sections_layout_settings_page_section_id'), table_name='pages_sections_layout_settings')
op.drop_constraint(None, 'images', type_='foreignkey')
op.drop_index(op.f('ix_images_parent_image_id'), table_name='images')
op.drop_index(op.f('ix_images_imbo_id'), table_name='images')
op.drop_column('images', 'parent_image_id')
### end Alembic commands ###
|
<commit_before><commit_msg>Add DB migration for self referential images<commit_after>"""Added self-referential image parent
Revision ID: f96f78987d58
Revises: 698cc06661d6
Create Date: 2016-03-20 19:21:05.651929
"""
# revision identifiers, used by Alembic.
revision = 'f96f78987d58'
down_revision = '698cc06661d6'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('images', sa.Column('parent_image_id', sa.Integer(), nullable=True))
op.create_index(op.f('ix_images_imbo_id'), 'images', ['imbo_id'], unique=False)
op.create_index(op.f('ix_images_parent_image_id'), 'images', ['parent_image_id'], unique=False)
op.create_foreign_key(None, 'images', 'images', ['parent_image_id'], ['id'])
op.create_index(op.f('ix_pages_sections_layout_settings_page_section_id'), 'pages_sections_layout_settings', ['page_section_id'], unique=False)
op.drop_index('ix_sites_settings_site_id', table_name='sites_settings')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_sites_settings_site_id', 'sites_settings', ['site_id'], unique=False)
op.drop_index(op.f('ix_pages_sections_layout_settings_page_section_id'), table_name='pages_sections_layout_settings')
op.drop_constraint(None, 'images', type_='foreignkey')
op.drop_index(op.f('ix_images_parent_image_id'), table_name='images')
op.drop_index(op.f('ix_images_imbo_id'), table_name='images')
op.drop_column('images', 'parent_image_id')
### end Alembic commands ###
|
|
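For context, the self-referential foreign key this migration wires up (images.parent_image_id pointing back at images.id) is normally mirrored by a relationship on the ORM model. A minimal sketch of that model side in SQLAlchemy 1.4+ declarative style — the Image class name, the String column type, and the 'variants' backref are illustrative assumptions, not taken from the repository:
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship
Base = declarative_base()
class Image(Base):
    __tablename__ = 'images'
    id = Column(Integer, primary_key=True)
    imbo_id = Column(String, index=True)
    # Nullable FK onto the same table, matching op.create_foreign_key above.
    parent_image_id = Column(Integer, ForeignKey('images.id'), nullable=True, index=True)
    # remote_side pins the "one" end of the self-join to the primary key,
    # so image.parent walks up the tree and image.variants walks down.
    parent = relationship('Image', remote_side=[id], backref='variants')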
b0ad67e88481b7656e56f6d1af9449eb43e43337
|
lowfat/management/commands/fixstatus.py
|
lowfat/management/commands/fixstatus.py
|
import datetime
from django.core.management.base import BaseCommand
from lowfat.models import Fund, Expense
class Command(BaseCommand):
help = "Fix the status on the database."
def handle(self, *args, **options):
# Blog posts
# Nothing to change
# Expense status
# Nothing to change
# Fund status
# Move approved to archived
for fund in Fund.objects.all():
if fund.status == "A":
day = datetime.timedelta(1)
if fund.added - datetime.datetime(2017, 12, 31) < day:
# Check for expenses
# Check for blog posts
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
else:
can_be_archive = True
expenses = Expense.objects.filter(
fund=fund,
)
if not expenses:
can_be_archive = False
for expense in expenses:
if expense.status in ["S", "C"]:
can_be_archive = False
break
if can_be_archive == True:
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
|
Add script to archive some funding requests
|
Add script to archive some funding requests
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
Add script to archive some funding requests
|
import datetime
from django.core.management.base import BaseCommand
from lowfat.models import Fund, Expense
class Command(BaseCommand):
help = "Fix the status on the database."
def handle(self, *args, **options):
# Blog posts
# Nothing to change
# Expense status
# Nothing to change
# Fund status
# Move approved to archived
for fund in Fund.objects.all():
if fund.status == "A":
day = datetime.timedelta(1)
if fund.added - datetime.datetime(2017, 12, 31) < day:
# Check for expenses
# Check for blog posts
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
else:
can_be_archive = True
expenses = Expense.objects.filter(
fund=fund,
)
if not expenses:
can_be_archive = False
for expense in expenses:
if expense.status in ["S", "C"]:
can_be_archive = False
break
if can_be_archive == True:
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
|
<commit_before><commit_msg>Add script to archive some funding requests<commit_after>
|
import datetime
from django.core.management.base import BaseCommand
from lowfat.models import Fund, Expense
class Command(BaseCommand):
help = "Fix the status on the database."
def handle(self, *args, **options):
# Blog posts
# Nothing to change
# Expense status
# Nothing to change
# Fund status
# Move approved to archived
for fund in Fund.objects.all():
if fund.status == "A":
day = datetime.timedelta(1)
if fund.added - datetime.datetime(2017, 12, 31) < day:
# Check for expenses
# Check for blog posts
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
else:
can_be_archive = True
expenses = Expense.objects.filter(
fund=fund,
)
if not expenses:
can_be_archive = False
for expense in expenses:
if expense.status in ["S", "C"]:
can_be_archive = False
break
if can_be_archive == True:
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
|
Add script to archive some funding requestsimport datetime
from django.core.management.base import BaseCommand
from lowfat.models import Fund, Expense
class Command(BaseCommand):
help = "Fix the status on the database."
def handle(self, *args, **options):
# Blog posts
# Nothing to change
# Expense status
# Nothing to change
# Fund status
# Move approved to archived
for fund in Fund.objects.all():
if fund.status == "A":
day = datetime.timedelta(1)
if fund.added - datetime.datetime(2017, 12, 31) < day:
# Check for expenses
# Check for blog posts
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
else:
can_be_archive = True
expenses = Expense.objects.filter(
fund=fund,
)
if not expenses:
can_be_archive = False
for expense in expenses:
if expense.status in ["S", "C"]:
can_be_archive = False
break
if can_be_archive == True:
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
|
<commit_before><commit_msg>Add script to archive some funding requests<commit_after>import datetime
from django.core.management.base import BaseCommand
from lowfat.models import Fund, Expense
class Command(BaseCommand):
help = "Fix the status on the database."
def handle(self, *args, **options):
# Blog posts
# Nothing to change
# Expense status
# Nothing to change
# Fund status
# Move approved to archived
for fund in Fund.objects.all():
if fund.status == "A":
day = datetime.timedelta(1)
if fund.added - datetime.datetime(2017, 12, 31) < day:
# Check for expenses
# Check for blog posts
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
else:
can_be_archive = True
expenses = Expense.objects.filter(
fund=fund,
)
if not expenses:
can_be_archive = False
for expense in expenses:
if expense.status in ["S", "C"]:
can_be_archive = False
break
if can_be_archive == True:
print("Changing status for {}".format(fund))
fund.status = "F"
fund.save()
|
|
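A side note on the cutoff test in the script above: `fund.added - datetime.datetime(2017, 12, 31) < day` is also true for funds added long before the cutoff (the difference simply goes negative), and it compares what is typically a timezone-aware Django field against a naive datetime. A sketch of a more explicit check, assuming `added` is timezone-aware — an illustrative alternative, not the project's code:
import datetime
from django.utils import timezone
# Make the naive cutoff timezone-aware so it compares cleanly with the field.
CUTOFF = timezone.make_aware(datetime.datetime(2017, 12, 31))
def added_near_cutoff(fund, window=datetime.timedelta(days=1)):
    # Absolute distance keeps funds far before the cutoff from matching.
    return abs(fund.added - CUTOFF) < window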
7d31792ad48cccf1e893cc2f5a1552d562030771
|
symposion/schedule/migrations/0004_remove_session_and_sessionrole.py
|
symposion/schedule/migrations/0004_remove_session_and_sessionrole.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-05 17:38
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('symposion_schedule', '0003_make_sessionrole_submitted_timezone_aware'),
]
operations = [
migrations.RemoveField(
model_name='session',
name='day',
),
migrations.RemoveField(
model_name='session',
name='slots',
),
migrations.AlterUniqueTogether(
name='sessionrole',
unique_together=set([]),
),
migrations.RemoveField(
model_name='sessionrole',
name='session',
),
migrations.RemoveField(
model_name='sessionrole',
name='user',
),
migrations.DeleteModel(
name='Session',
),
migrations.DeleteModel(
name='SessionRole',
),
]
|
Add migrations to remove Session and SessionRole.
|
Add migrations to remove Session and SessionRole.
See 7873a7959f27bbd39fde74f7912d24390d1f58a0.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
Add migrations to remove Session and SessionRole.
See 7873a7959f27bbd39fde74f7912d24390d1f58a0.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-05 17:38
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('symposion_schedule', '0003_make_sessionrole_submitted_timezone_aware'),
]
operations = [
migrations.RemoveField(
model_name='session',
name='day',
),
migrations.RemoveField(
model_name='session',
name='slots',
),
migrations.AlterUniqueTogether(
name='sessionrole',
unique_together=set([]),
),
migrations.RemoveField(
model_name='sessionrole',
name='session',
),
migrations.RemoveField(
model_name='sessionrole',
name='user',
),
migrations.DeleteModel(
name='Session',
),
migrations.DeleteModel(
name='SessionRole',
),
]
|
<commit_before><commit_msg>Add migrations to remove Session and SessionRole.
See 7873a7959f27bbd39fde74f7912d24390d1f58a0.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-05 17:38
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('symposion_schedule', '0003_make_sessionrole_submitted_timezone_aware'),
]
operations = [
migrations.RemoveField(
model_name='session',
name='day',
),
migrations.RemoveField(
model_name='session',
name='slots',
),
migrations.AlterUniqueTogether(
name='sessionrole',
unique_together=set([]),
),
migrations.RemoveField(
model_name='sessionrole',
name='session',
),
migrations.RemoveField(
model_name='sessionrole',
name='user',
),
migrations.DeleteModel(
name='Session',
),
migrations.DeleteModel(
name='SessionRole',
),
]
|
Add migrations to remove Session and SessionRole.
See 7873a7959f27bbd39fde74f7912d24390d1f58a0.# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-05 17:38
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('symposion_schedule', '0003_make_sessionrole_submitted_timezone_aware'),
]
operations = [
migrations.RemoveField(
model_name='session',
name='day',
),
migrations.RemoveField(
model_name='session',
name='slots',
),
migrations.AlterUniqueTogether(
name='sessionrole',
unique_together=set([]),
),
migrations.RemoveField(
model_name='sessionrole',
name='session',
),
migrations.RemoveField(
model_name='sessionrole',
name='user',
),
migrations.DeleteModel(
name='Session',
),
migrations.DeleteModel(
name='SessionRole',
),
]
|
<commit_before><commit_msg>Add migrations to remove Session and SessionRole.
See 7873a7959f27bbd39fde74f7912d24390d1f58a0.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-05 17:38
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('symposion_schedule', '0003_make_sessionrole_submitted_timezone_aware'),
]
operations = [
migrations.RemoveField(
model_name='session',
name='day',
),
migrations.RemoveField(
model_name='session',
name='slots',
),
migrations.AlterUniqueTogether(
name='sessionrole',
unique_together=set([]),
),
migrations.RemoveField(
model_name='sessionrole',
name='session',
),
migrations.RemoveField(
model_name='sessionrole',
name='user',
),
migrations.DeleteModel(
name='Session',
),
migrations.DeleteModel(
name='SessionRole',
),
]
|
|
5ba528773ca7b7bce61e983cfa2d9f028367805c
|
fat/migrations/0081_auto_20161010_1616.py
|
fat/migrations/0081_auto_20161010_1616.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-10-10 16:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fat', '0080_auto_20160831_0732'),
]
operations = [
migrations.AddField(
model_name='claimed',
name='data_carpentry_instructor',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='claimed',
name='software_carpentry_instructor',
field=models.BooleanField(default=False),
),
]
|
Add migration for Software Carpentry and Data Carpentry
|
Add migration for Software Carpentry and Data Carpentry
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
Add migration for Software Carpentry and Data Carpentry
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-10-10 16:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fat', '0080_auto_20160831_0732'),
]
operations = [
migrations.AddField(
model_name='claimed',
name='data_carpentry_instructor',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='claimed',
name='software_carpentry_instructor',
field=models.BooleanField(default=False),
),
]
|
<commit_before><commit_msg>Add migration for Software Carpentry and Data Carpentry<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-10-10 16:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fat', '0080_auto_20160831_0732'),
]
operations = [
migrations.AddField(
model_name='claimed',
name='data_carpentry_instructor',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='claimed',
name='software_carpentry_instructor',
field=models.BooleanField(default=False),
),
]
|
Add migration for Software Carpentry and Data Carpentry# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-10-10 16:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fat', '0080_auto_20160831_0732'),
]
operations = [
migrations.AddField(
model_name='claimed',
name='data_carpentry_instructor',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='claimed',
name='software_carpentry_instructor',
field=models.BooleanField(default=False),
),
]
|
<commit_before><commit_msg>Add migration for Software Carpentry and Data Carpentry<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-10-10 16:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fat', '0080_auto_20160831_0732'),
]
operations = [
migrations.AddField(
model_name='claimed',
name='data_carpentry_instructor',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='claimed',
name='software_carpentry_instructor',
field=models.BooleanField(default=False),
),
]
|
|
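For reference, an auto-generated AddField pair like the one above corresponds to two plain boolean fields on the model; a minimal sketch of what the Claimed model declares after this migration (the surrounding fields are elided and assumed):
from django.db import models
class Claimed(models.Model):
    # ... existing Claimed fields elided ...
    data_carpentry_instructor = models.BooleanField(default=False)
    software_carpentry_instructor = models.BooleanField(default=False)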
36aa92b1c4a6c00cd50ed303565107b0213f09c2
|
enthought/chaco/tests/border_test_case.py
|
enthought/chaco/tests/border_test_case.py
|
""" Needed Tests
Component.draw_border() tests
--------------------
DONE *. draw_border output should match a similar draw_rect output
"""
import nose
import unittest
from numpy import array, alltrue, ravel
# Chaco imports
from enthought.chaco.api import Plot, PlotGraphicsContext
class DrawBorderTestCase(unittest.TestCase):
def assertRavelEqual(self, x, y):
self.assert_(alltrue(ravel(x) == ravel(y)), "\n%s\n !=\n%s" % (x, y))
def test_draw_border_simple(self):
""" Borders should have the correct height and width.
"""
size = (5,5)
container = Plot(padding=1, border_visible=True)
container.outer_bounds = list(size)
gc = PlotGraphicsContext(size)
gc.render_component(container)
desired = array(((255, 255, 255, 255, 255, 255),
(255, 255, 255, 255, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 0, 255, 0, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 255, 255, 255, 255, 255)))
actual = gc.bmp_array[:,:,0]
self.assertRavelEqual(actual, desired)
if __name__ == "__main__":
unittest.main()
|
Add a test case for component borders
|
Add a test case for component borders
|
Python
|
bsd-3-clause
|
tommy-u/chaco,ContinuumIO/chaco,tommy-u/chaco,burnpanck/chaco,ContinuumIO/chaco,ContinuumIO/chaco,tommy-u/chaco,burnpanck/chaco,ContinuumIO/chaco,burnpanck/chaco
|
Add a test case for component borders
|
""" Needed Tests
Component.draw_border() tests
--------------------
DONE *. draw_border output should match a similar draw_rect output
"""
import nose
import unittest
from numpy import array, alltrue, ravel
# Chaco imports
from enthought.chaco.api import Plot, PlotGraphicsContext
class DrawBorderTestCase(unittest.TestCase):
def assertRavelEqual(self, x, y):
self.assert_(alltrue(ravel(x) == ravel(y)), "\n%s\n !=\n%s" % (x, y))
def test_draw_border_simple(self):
""" Borders should have the correct height and width.
"""
size = (5,5)
container = Plot(padding=1, border_visible=True)
container.outer_bounds = list(size)
gc = PlotGraphicsContext(size)
gc.render_component(container)
desired = array(((255, 255, 255, 255, 255, 255),
(255, 255, 255, 255, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 0, 255, 0, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 255, 255, 255, 255, 255)))
actual = gc.bmp_array[:,:,0]
self.assertRavelEqual(actual, desired)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add a test case for component borders<commit_after>
|
""" Needed Tests
Component.draw_border() tests
--------------------
DONE *. draw_border output should match a similar draw_rect output
"""
import nose
import unittest
from numpy import array, alltrue, ravel
# Chaco imports
from enthought.chaco.api import Plot, PlotGraphicsContext
class DrawBorderTestCase(unittest.TestCase):
def assertRavelEqual(self, x, y):
self.assert_(alltrue(ravel(x) == ravel(y)), "\n%s\n !=\n%s" % (x, y))
def test_draw_border_simple(self):
""" Borders should have the correct height and width.
"""
size = (5,5)
container = Plot(padding=1, border_visible=True)
container.outer_bounds = list(size)
gc = PlotGraphicsContext(size)
gc.render_component(container)
desired = array(((255, 255, 255, 255, 255, 255),
(255, 255, 255, 255, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 0, 255, 0, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 255, 255, 255, 255, 255)))
actual = gc.bmp_array[:,:,0]
self.assertRavelEqual(actual, desired)
if __name__ == "__main__":
unittest.main()
|
Add a test case for component borders""" Needed Tests
Component.draw_border() tests
--------------------
DONE *. draw_border output should match a similar draw_rect output
"""
import nose
import unittest
from numpy import array, alltrue, ravel
# Chaco imports
from enthought.chaco.api import Plot, PlotGraphicsContext
class DrawBorderTestCase(unittest.TestCase):
def assertRavelEqual(self, x, y):
self.assert_(alltrue(ravel(x) == ravel(y)), "\n%s\n !=\n%s" % (x, y))
def test_draw_border_simple(self):
""" Borders should have the correct height and width.
"""
size = (5,5)
container = Plot(padding=1, border_visible=True)
container.outer_bounds = list(size)
gc = PlotGraphicsContext(size)
gc.render_component(container)
desired = array(((255, 255, 255, 255, 255, 255),
(255, 255, 255, 255, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 0, 255, 0, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 255, 255, 255, 255, 255)))
actual = gc.bmp_array[:,:,0]
self.assertRavelEqual(actual, desired)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add a test case for component borders<commit_after>""" Needed Tests
Component.draw_border() tests
--------------------
DONE *. draw_border output should match a similar draw_rect output
"""
import nose
import unittest
from numpy import array, alltrue, ravel
# Chaco imports
from enthought.chaco.api import Plot, PlotGraphicsContext
class DrawBorderTestCase(unittest.TestCase):
def assertRavelEqual(self, x, y):
self.assert_(alltrue(ravel(x) == ravel(y)), "\n%s\n !=\n%s" % (x, y))
def test_draw_border_simple(self):
""" Borders should have the correct height and width.
"""
size = (5,5)
container = Plot(padding=1, border_visible=True)
container.outer_bounds = list(size)
gc = PlotGraphicsContext(size)
gc.render_component(container)
desired = array(((255, 255, 255, 255, 255, 255),
(255, 255, 255, 255, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 0, 255, 0, 255, 255),
(255, 0, 0, 0, 255, 255),
(255, 255, 255, 255, 255, 255)))
actual = gc.bmp_array[:,:,0]
self.assertRavelEqual(actual, desired)
if __name__ == "__main__":
unittest.main()
|
|
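The hand-rolled assertRavelEqual above flattens both arrays before comparing; numpy ships an equivalent assertion with a per-element failure report built in. A drop-in alternative for the final comparison (an option, not a change the commit makes):
from numpy.testing import assert_array_equal
# Raises AssertionError with an element-wise diff when the arrays differ.
assert_array_equal(actual, desired)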
edb2aaa60d56526a8f48ed40b04a8c903203bc64
|
web/utils/views.py
|
web/utils/views.py
|
import StringIO
import zipfile
from django.http import HttpResponse
def plain_text(name, contents, content_type='text/plain'):
"""
Downloads a plain text file with the given name and contents.
"""
response = HttpResponse(content_type='{0}; charset=utf-8'.format(content_type))
response['Content-Disposition'] = 'attachment; filename={0}'.format(name)
response.write(contents)
return response
def zip_archive(archive_name, files):
"""
Downloads a zip archive with the given name and containing the given
iterable of files, each represented by a pair, where the first component
gives the filename and the second one gives the contents.
"""
string_buffer = StringIO.StringIO()
archive = zipfile.ZipFile(string_buffer, 'w', zipfile.ZIP_DEFLATED)
for filename, contents in files:
archive.writestr(filename, contents.encode('utf-8'))
archive.close()
response = HttpResponse(string_buffer.getvalue(), content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename={0}.zip'.format(archive_name)
return response
|
Add helper functions for downloading files
|
Add helper functions for downloading files
|
Python
|
agpl-3.0
|
matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo
|
Add helper functions for downloading files
|
import StringIO
import zipfile
from django.http import HttpResponse
def plain_text(name, contents, content_type='text/plain'):
"""
Downloads a plain text file with the given name and contents.
"""
response = HttpResponse(content_type='{0}; charset=utf-8'.format(content_type))
response['Content-Disposition'] = 'attachment; filename={0}'.format(name)
response.write(contents)
return response
def zip_archive(archive_name, files):
"""
Downloads a zip archive with the given name and containing the given
iterable of files, each represented by a pair, where the first component
gives the filename and the second one gives the contents.
"""
string_buffer = StringIO.StringIO()
archive = zipfile.ZipFile(string_buffer, 'w', zipfile.ZIP_DEFLATED)
for filename, contents in files:
archive.writestr(filename, contents.encode('utf-8'))
archive.close()
response = HttpResponse(string_buffer.getvalue(), content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename={0}.zip'.format(archive_name)
return response
|
<commit_before><commit_msg>Add helper functions for downloading files<commit_after>
|
import StringIO
import zipfile
from django.http import HttpResponse
def plain_text(name, contents, content_type='text/plain'):
"""
Downloads a plain text file with the given name and contents.
"""
response = HttpResponse(content_type='{0}; charset=utf-8'.format(content_type))
response['Content-Disposition'] = 'attachment; filename={0}'.format(name)
response.write(contents)
return response
def zip_archive(archive_name, files):
"""
Downloads a zip archive with the given name and containing the given
iterable of files, each represented by a pair, where the first component
gives the filename and the second one gives the contents.
"""
string_buffer = StringIO.StringIO()
archive = zipfile.ZipFile(string_buffer, 'w', zipfile.ZIP_DEFLATED)
for filename, contents in files:
archive.writestr(filename, contents.encode('utf-8'))
archive.close()
response = HttpResponse(string_buffer.getvalue(), content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename={0}.zip'.format(archive_name)
return response
|
Add helper functions for downloading filesimport StringIO
import zipfile
from django.http import HttpResponse
def plain_text(name, contents, content_type='text/plain'):
"""
Downloads a plain text file with the given name and contents.
"""
response = HttpResponse(content_type='{0}; charset=utf-8'.format(content_type))
response['Content-Disposition'] = 'attachment; filename={0}'.format(name)
response.write(contents)
return response
def zip_archive(archive_name, files):
"""
Downloads a zip archive with the given name and containing the given
iterable of files, each represented by a pair, where the first component
gives the filename and the second one gives the contents.
"""
string_buffer = StringIO.StringIO()
archive = zipfile.ZipFile(string_buffer, 'w', zipfile.ZIP_DEFLATED)
for filename, contents in files:
archive.writestr(filename, contents.encode('utf-8'))
archive.close()
response = HttpResponse(string_buffer.getvalue(), content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename={0}.zip'.format(archive_name)
return response
|
<commit_before><commit_msg>Add helper functions for downloading files<commit_after>import StringIO
import zipfile
from django.http import HttpResponse
def plain_text(name, contents, content_type='text/plain'):
"""
Downloads a plain text file with the given name and contents.
"""
response = HttpResponse(content_type='{0}; charset=utf-8'.format(content_type))
response['Content-Disposition'] = 'attachment; filename={0}'.format(name)
response.write(contents)
return response
def zip_archive(archive_name, files):
"""
Downloads a zip archive with the given name and containing the given
iterable of files, each represented by a pair, where the first component
gives the filename and the second one gives the contents.
"""
string_buffer = StringIO.StringIO()
archive = zipfile.ZipFile(string_buffer, 'w', zipfile.ZIP_DEFLATED)
for filename, contents in files:
archive.writestr(filename, contents.encode('utf-8'))
archive.close()
response = HttpResponse(string_buffer.getvalue(), content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename={0}.zip'.format(archive_name)
return response
|
|
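A short usage sketch for the two helpers above, wired into ordinary Django views — the view names and file contents are hypothetical. Note that zip_archive calls contents.encode('utf-8'), so the pairs should carry unicode strings:
def download_notes(request):
    # One plain-text attachment named notes.txt.
    return plain_text('notes.txt', u'first line\nsecond line')
def download_solutions(request):
    # Any iterable of (filename, contents) pairs becomes a single zip attachment.
    files = [('a.py', u'print(1)'), ('b.py', u'print(2)')]
    return zip_archive('solutions', files)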
7726ac8471730cdcb426ef002c7b2046cdffd602
|
sippy/IVoiSysAuthorisation.py
|
sippy/IVoiSysAuthorisation.py
|
# Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2007 Sippy Software, Inc. All rights reserved.
#
# This file is part of SIPPY, a free RFC3261 SIP stack and B2BUA.
#
# SIPPY is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# For a license to use the SIPPY software under conditions
# other than those described here, or to purchase support for this
# software, please contact Sippy Software, Inc. by e-mail at the
# following addresses: sales@sippysoft.com.
#
# SIPPY is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
from SQLTransactionManager import SQLTransactionManager
from time import time
class IVoiSysAuthorisation(object):
def __init__(self, global_config):
dsn = 'mysql://sbcfront:gb19!lDLn2#)F$NFbd2*@sbcdb1.pennytel.com/sbc'
self.sql_tm = SQLTransactionManager(dsn, nworkers = 4, lazy_connect = True)
def do_auth(self, username, res_cb, *cb_args):
self.sql_tm.sendQuery(('SELECT password, outbound_proxy, domain FROM SBC_Reg_Config ' \
'WHERE account_number = \'%s\'' % username), self._process_result, 0, False, None,
res_cb, cb_args)
def _process_result(self, results, exceptions, res_cb, cb_args):
print results, exceptions
if exceptions[0] != None or len(results[0]) == 0:
res_cb(None, *cb_args)
else:
password, outbound_proxy, domain = results[0][0]
res_cb((password, (outbound_proxy, 5060), domain), *cb_args)
|
Add class specific for IVoiSys to do DB auth.
|
Add class specific for IVoiSys to do DB auth.
|
Python
|
bsd-2-clause
|
AVOXI/b2bua,sippy/b2bua,sippy/b2bua,AVOXI/b2bua
|
Add class specific for IVoiSys to do DB auth.
|
# Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2007 Sippy Software, Inc. All rights reserved.
#
# This file is part of SIPPY, a free RFC3261 SIP stack and B2BUA.
#
# SIPPY is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# For a license to use the SIPPY software under conditions
# other than those described here, or to purchase support for this
# software, please contact Sippy Software, Inc. by e-mail at the
# following addresses: sales@sippysoft.com.
#
# SIPPY is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
from SQLTransactionManager import SQLTransactionManager
from time import time
class IVoiSysAuthorisation(object):
def __init__(self, global_config):
dsn = 'mysql://sbcfront:gb19!lDLn2#)F$NFbd2*@sbcdb1.pennytel.com/sbc'
self.sql_tm = SQLTransactionManager(dsn, nworkers = 4, lazy_connect = True)
def do_auth(self, username, res_cb, *cb_args):
self.sql_tm.sendQuery(('SELECT password, outbound_proxy, domain FROM SBC_Reg_Config ' \
'WHERE account_number = \'%s\'' % username), self._process_result, 0, False, None,
res_cb, cb_args)
def _process_result(self, results, exceptions, res_cb, cb_args):
print results, exceptions
if exceptions[0] != None or len(results[0]) == 0:
res_cb(None, *cb_args)
else:
password, outbound_proxy, domain = results[0][0]
res_cb((password, (outbound_proxy, 5060), domain), *cb_args)
|
<commit_before><commit_msg>Add class specific for IVoiSys to do DB auth.<commit_after>
|
# Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2007 Sippy Software, Inc. All rights reserved.
#
# This file is part of SIPPY, a free RFC3261 SIP stack and B2BUA.
#
# SIPPY is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# For a license to use the SIPPY software under conditions
# other than those described here, or to purchase support for this
# software, please contact Sippy Software, Inc. by e-mail at the
# following addresses: sales@sippysoft.com.
#
# SIPPY is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
from SQLTransactionManager import SQLTransactionManager
from time import time
class IVoiSysAuthorisation(object):
def __init__(self, global_config):
dsn = 'mysql://sbcfront:gb19!lDLn2#)F$NFbd2*@sbcdb1.pennytel.com/sbc'
self.sql_tm = SQLTransactionManager(dsn, nworkers = 4, lazy_connect = True)
def do_auth(self, username, res_cb, *cb_args):
self.sql_tm.sendQuery(('SELECT password, outbound_proxy, domain FROM SBC_Reg_Config ' \
'WHERE account_number = \'%s\'' % username), self._process_result, 0, False, None,
res_cb, cb_args)
def _process_result(self, results, exceptions, res_cb, cb_args):
print results, exceptions
if exceptions[0] != None or len(results[0]) == 0:
res_cb(None, *cb_args)
else:
password, outbound_proxy, domain = results[0][0]
res_cb((password, (outbound_proxy, 5060), domain), *cb_args)
|
Add class specific for IVoiSys to do DB auth.# Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2007 Sippy Software, Inc. All rights reserved.
#
# This file is part of SIPPY, a free RFC3261 SIP stack and B2BUA.
#
# SIPPY is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# For a license to use the SIPPY software under conditions
# other than those described here, or to purchase support for this
# software, please contact Sippy Software, Inc. by e-mail at the
# following addresses: sales@sippysoft.com.
#
# SIPPY is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
from SQLTransactionManager import SQLTransactionManager
from time import time
class IVoiSysAuthorisation(object):
def __init__(self, global_config):
dsn = 'mysql://sbcfront:gb19!lDLn2#)F$NFbd2*@sbcdb1.pennytel.com/sbc'
self.sql_tm = SQLTransactionManager(dsn, nworkers = 4, lazy_connect = True)
def do_auth(self, username, res_cb, *cb_args):
self.sql_tm.sendQuery(('SELECT password, outbound_proxy, domain FROM SBC_Reg_Config ' \
'WHERE account_number = \'%s\'' % username), self._process_result, 0, False, None,
res_cb, cb_args)
def _process_result(self, results, exceptions, res_cb, cb_args):
print results, exceptions
if exceptions[0] != None or len(results[0]) == 0:
res_cb(None, *cb_args)
else:
password, outbound_proxy, domain = results[0][0]
res_cb((password, (outbound_proxy, 5060), domain), *cb_args)
|
<commit_before><commit_msg>Add class specific for IVoiSys to do DB auth.<commit_after># Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2007 Sippy Software, Inc. All rights reserved.
#
# This file is part of SIPPY, a free RFC3261 SIP stack and B2BUA.
#
# SIPPY is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# For a license to use the SIPPY software under conditions
# other than those described here, or to purchase support for this
# software, please contact Sippy Software, Inc. by e-mail at the
# following addresses: sales@sippysoft.com.
#
# SIPPY is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
from SQLTransactionManager import SQLTransactionManager
from time import time
class IVoiSysAuthorisation(object):
def __init__(self, global_config):
dsn = 'mysql://sbcfront:gb19!lDLn2#)F$NFbd2*@sbcdb1.pennytel.com/sbc'
self.sql_tm = SQLTransactionManager(dsn, nworkers = 4, lazy_connect = True)
def do_auth(self, username, res_cb, *cb_args):
self.sql_tm.sendQuery(('SELECT password, outbound_proxy, domain FROM SBC_Reg_Config ' \
'WHERE account_number = \'%s\'' % username), self._process_result, 0, False, None,
res_cb, cb_args)
def _process_result(self, results, exceptions, res_cb, cb_args):
print results, exceptions
if exceptions[0] != None or len(results[0]) == 0:
res_cb(None, *cb_args)
else:
password, outbound_proxy, domain = results[0][0]
res_cb((password, (outbound_proxy, 5060), domain), *cb_args)
|
|
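Two notes on the authoriser above. Its do_auth API is callback-driven: the result callback receives None when no row matches, otherwise a (password, (outbound_proxy, 5060), domain) triple. Also, the query interpolates `username` straight into the SQL string; if SQLTransactionManager accepts placeholder parameters (its API is not shown here, so that is an assumption), parameterising the query would close the injection hole. A hypothetical Python 2 caller:
def on_auth(result, account):
    # result is None when no SBC_Reg_Config row matched the account number.
    if result is None:
        print 'no auth data for %s' % account
        return
    password, proxy_address, domain = result
    print 'account %s registers at %s via %r' % (account, domain, proxy_address)
authoriser = IVoiSysAuthorisation(global_config)  # global_config built elsewhere by the B2BUA
authoriser.do_auth('0912345678', on_auth, '0912345678')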
d77a01a76bfe4789cf76f656b7136069ddac33a3
|
scipy/constants/tests/test_constants.py
|
scipy/constants/tests/test_constants.py
|
from numpy.testing import run_module_suite, assert_equal
import scipy.constants as sc
def test_fahrenheit_to_celcius():
assert_equal(sc.F2C(32), 0)
assert_equal(sc.F2C([32, 32]), [0, 0])
def test_celcius_to_kelvin():
assert_equal(sc.C2K([0, 0]), [273.15, 273.15])
def test_kelvin_to_celcius():
assert_equal(sc.K2C([0, 0]), [-273.15, -273.15])
def test_fahrenheit_to_kelvin():
assert_equal(sc.F2K([32, 32]), [273.15, 273.15])
def test_kelvin_to_fahrenheit():
assert_equal(sc.K2F([273.15, 273.15]), [32, 32])
def test_celcius_to_fahrenheit():
assert_equal(sc.C2F([0, 0]), [32, 32])
def test_lambda_to_nu():
assert_equal(sc.lambda2nu(sc.speed_of_light), 1)
def test_nu_to_lambda():
assert_equal(sc.nu2lambda(1), sc.speed_of_light)
if __name__ == "__main__":
run_module_suite()
|
Add basic tests for constants.
|
ENH: Add basic tests for constants.
|
Python
|
bsd-3-clause
|
arokem/scipy,zxsted/scipy,cpaulik/scipy,mingwpy/scipy,Gillu13/scipy,njwilson23/scipy,matthew-brett/scipy,mikebenfield/scipy,pnedunuri/scipy,larsmans/scipy,ilayn/scipy,jonycgn/scipy,dch312/scipy,anntzer/scipy,maniteja123/scipy,futurulus/scipy,Dapid/scipy,hainm/scipy,WarrenWeckesser/scipy,e-q/scipy,person142/scipy,lhilt/scipy,pschella/scipy,pizzathief/scipy,josephcslater/scipy,zaxliu/scipy,vigna/scipy,jakevdp/scipy,giorgiop/scipy,dominicelse/scipy,zxsted/scipy,lukauskas/scipy,apbard/scipy,ortylp/scipy,mortonjt/scipy,kalvdans/scipy,Newman101/scipy,Shaswat27/scipy,Dapid/scipy,haudren/scipy,hainm/scipy,Srisai85/scipy,richardotis/scipy,ilayn/scipy,Newman101/scipy,andyfaff/scipy,richardotis/scipy,mikebenfield/scipy,fernand/scipy,raoulbq/scipy,andim/scipy,zaxliu/scipy,teoliphant/scipy,zerothi/scipy,juliantaylor/scipy,anielsen001/scipy,anntzer/scipy,grlee77/scipy,nvoron23/scipy,ogrisel/scipy,gef756/scipy,zxsted/scipy,jor-/scipy,mortada/scipy,pnedunuri/scipy,grlee77/scipy,andim/scipy,zaxliu/scipy,jseabold/scipy,nonhermitian/scipy,zerothi/scipy,giorgiop/scipy,trankmichael/scipy,dch312/scipy,Kamp9/scipy,ales-erjavec/scipy,Stefan-Endres/scipy,nonhermitian/scipy,FRidh/scipy,woodscn/scipy,ogrisel/scipy,gdooper/scipy,nmayorov/scipy,jamestwebber/scipy,ndchorley/scipy,jsilter/scipy,gef756/scipy,newemailjdm/scipy,vhaasteren/scipy,gertingold/scipy,vigna/scipy,hainm/scipy,Kamp9/scipy,aarchiba/scipy,kalvdans/scipy,pbrod/scipy,surhudm/scipy,juliantaylor/scipy,gef756/scipy,ndchorley/scipy,jsilter/scipy,pbrod/scipy,raoulbq/scipy,woodscn/scipy,WillieMaddox/scipy,lhilt/scipy,befelix/scipy,kalvdans/scipy,zxsted/scipy,behzadnouri/scipy,perimosocordiae/scipy,person142/scipy,lukauskas/scipy,chatcannon/scipy,andyfaff/scipy,lukauskas/scipy,minhlongdo/scipy,nonhermitian/scipy,FRidh/scipy,matthew-brett/scipy,anielsen001/scipy,gfyoung/scipy,efiring/scipy,gdooper/scipy,haudren/scipy,giorgiop/scipy,jjhelmus/scipy,WillieMaddox/scipy,teoliphant/scipy,zerothi/scipy,pyramania/scipy,jsilter/scipy,vhaasteren/scipy,sauliusl/scipy,mdhaber/scipy,WillieMaddox/scipy,jamestwebber/scipy,ndchorley/scipy,woodscn/scipy,pizzathief/scipy,zerothi/scipy,mdhaber/scipy,Stefan-Endres/scipy,matthew-brett/scipy,fredrikw/scipy,ortylp/scipy,mortonjt/scipy,mingwpy/scipy,gertingold/scipy,behzadnouri/scipy,jamestwebber/scipy,ndchorley/scipy,woodscn/scipy,Eric89GXL/scipy,efiring/scipy,andyfaff/scipy,ales-erjavec/scipy,andim/scipy,fernand/scipy,aman-iitj/scipy,cpaulik/scipy,larsmans/scipy,jonycgn/scipy,rmcgibbo/scipy,jseabold/scipy,mingwpy/scipy,mgaitan/scipy,jakevdp/scipy,gdooper/scipy,sonnyhu/scipy,behzadnouri/scipy,scipy/scipy,sonnyhu/scipy,teoliphant/scipy,jseabold/scipy,nvoron23/scipy,mgaitan/scipy,mortonjt/scipy,ogrisel/scipy,andim/scipy,witcxc/scipy,jor-/scipy,WarrenWeckesser/scipy,scipy/scipy,mortonjt/scipy,mhogg/scipy,gdooper/scipy,endolith/scipy,Newman101/scipy,mortada/scipy,nvoron23/scipy,Eric89GXL/scipy,WarrenWeckesser/scipy,kleskjr/scipy,Srisai85/scipy,zaxliu/scipy,andim/scipy,Kamp9/scipy,futurulus/scipy,futurulus/scipy,efiring/scipy,rmcgibbo/scipy,kleskjr/scipy,dominicelse/scipy,Newman101/scipy,anielsen001/scipy,woodscn/scipy,petebachant/scipy,maniteja123/scipy,Gillu13/scipy,argriffing/scipy,surhudm/scipy,gertingold/scipy,petebachant/scipy,jjhelmus/scipy,endolith/scipy,rgommers/scipy,njwilson23/scipy,ortylp/scipy,dominicelse/scipy,endolith/scipy,jamestwebber/scipy,anntzer/scipy,piyush0609/scipy,anielsen001/scipy,newemailjdm/scipy,kleskjr/scipy,felipebetancur/scipy,witcxc/scipy,richardotis/scipy,josephcslater/scipy,e-q/scipy,ortylp/scipy,nmayorov/scipy,kleskjr/scipy,josephcslater/scipy,Stefan-Endres/scipy,sonnyhu/scipy,raoulbq/scipy,surhudm/scipy,lhilt/scipy,Srisai85/scipy,mtrbean/scipy,vanpact/scipy,perimosocordiae/scipy,matthew-brett/scipy,anntzer/scipy,sriki18/scipy,felipebetancur/scipy,rgommers/scipy,ChanderG/scipy,vberaudi/scipy,trankmichael/scipy,Eric89GXL/scipy,haudren/scipy,chatcannon/scipy,gfyoung/scipy,njwilson23/scipy,efiring/scipy,mingwpy/scipy,argriffing/scipy,josephcslater/scipy,jonycgn/scipy,newemailjdm/scipy,vberaudi/scipy,ogrisel/scipy,apbard/scipy,WillieMaddox/scipy,mikebenfield/scipy,maciejkula/scipy,apbard/scipy,fernand/scipy,perimosocordiae/scipy,Gillu13/scipy,futurulus/scipy,anntzer/scipy,Gillu13/scipy,teoliphant/scipy,Srisai85/scipy,jsilter/scipy,jakevdp/scipy,scipy/scipy,jor-/scipy,newemailjdm/scipy,jjhelmus/scipy,aarchiba/scipy,pbrod/scipy,niknow/scipy,cpaulik/scipy,ilayn/scipy,gfyoung/scipy,mgaitan/scipy,kleskjr/scipy,behzadnouri/scipy,futurulus/scipy,piyush0609/scipy,aman-iitj/scipy,tylerjereddy/scipy,befelix/scipy,befelix/scipy,njwilson23/scipy,fredrikw/scipy,andyfaff/scipy,futurulus/scipy,grlee77/scipy,pschella/scipy,matthewalbani/scipy,zxsted/scipy,WillieMaddox/scipy,aman-iitj/scipy,mhogg/scipy,minhlongdo/scipy,ndchorley/scipy,minhlongdo/scipy,pschella/scipy,cpaulik/scipy,mortada/scipy,hainm/scipy,nonhermitian/scipy,Kamp9/scipy,sauliusl/scipy,Dapid/scipy,pizzathief/scipy,bkendzior/scipy,pyramania/scipy,jseabold/scipy,ales-erjavec/scipy,Stefan-Endres/scipy,haudren/scipy,sriki18/scipy,WarrenWeckesser/scipy,sriki18/scipy,chatcannon/scipy,gfyoung/scipy,ndchorley/scipy,scipy/scipy,minhlongdo/scipy,pschella/scipy,jamestwebber/scipy,anielsen001/scipy,trankmichael/scipy,rmcgibbo/scipy,lukauskas/scipy,arokem/scipy,rgommers/scipy,chatcannon/scipy,vberaudi/scipy,maniteja123/scipy,rmcgibbo/scipy,nvoron23/scipy,behzadnouri/scipy,bkendzior/scipy,vberaudi/scipy,rgommers/scipy,maciejkula/scipy,zaxliu/scipy,sonnyhu/scipy,ales-erjavec/scipy,hainm/scipy,jonycgn/scipy,zaxliu/scipy,trankmichael/scipy,larsmans/scipy,witcxc/scipy,vhaasteren/scipy,aman-iitj/scipy,rmcgibbo/scipy,tylerjereddy/scipy,sargas/scipy,sonnyhu/scipy,vberaudi/scipy,ilayn/scipy,petebachant/scipy,andim/scipy,matthewalbani/scipy,vhaasteren/scipy,ortylp/scipy,larsmans/scipy,jor-/scipy,jonycgn/scipy,sauliusl/scipy,pbrod/scipy,FRidh/scipy,mhogg/scipy,mingwpy/scipy,dch312/scipy,WarrenWeckesser/scipy,mikebenfield/scipy,jjhelmus/scipy,fredrikw/scipy,argriffing/scipy,scipy/scipy,Eric89GXL/scipy,argriffing/scipy,mdhaber/scipy,trankmichael/scipy,Srisai85/scipy,fredrikw/scipy,zxsted/scipy,richardotis/scipy,aman-iitj/scipy,jsilter/scipy,fernand/scipy,sriki18/scipy,Dapid/scipy,andyfaff/scipy,Kamp9/scipy,apbard/scipy,sargas/scipy,mtrbean/scipy,witcxc/scipy,nmayorov/scipy,vanpact/scipy,gef756/scipy,newemailjdm/scipy,aarchiba/scipy,vhaasteren/scipy,aeklant/scipy,FRidh/scipy,ChanderG/scipy,grlee77/scipy,WillieMaddox/scipy,scipy/scipy,aeklant/scipy,argriffing/scipy,sargas/scipy,ChanderG/scipy,haudren/scipy,fernand/scipy,haudren/scipy,chatcannon/scipy,mortada/scipy,apbard/scipy,Newman101/scipy,tylerjereddy/scipy,juliantaylor/scipy,mdhaber/scipy,arokem/scipy,giorgiop/scipy,endolith/scipy,jjhelmus/scipy,dch312/scipy,jseabold/scipy,richardotis/scipy,vanpact/scipy,piyush0609/scipy,chatcannon/scipy,juliantaylor/scipy,bkendzior/scipy,sargas/scipy,cpaulik/scipy,vanpact/scipy,raoulbq/scipy,niknow/scipy,matthewalbani/scipy,Gillu13/scipy,endolith/scipy,mhogg/scipy,mgaitan/scipy,gef756/scipy,lhilt/scipy,sriki18/scipy,Stefan-Endres/scipy,rgommers/scipy,mikebenfield/scipy,grlee77/scipy,dominicelse/scipy,jonycgn/scipy,sauliusl/scipy,felipebetancur/scipy,fredrikw/scipy,maniteja123/scipy,cpaulik/scipy,Gillu13/scipy,pyramania/scipy,ogrisel/scipy,felipebetancur/scipy,dominicelse/scipy,minhlongdo/scipy,ales-erjavec/scipy,rmcgibbo/scipy,sauliusl/scipy,kalvdans/scipy,gfyoung/scipy,jakevdp/scipy,ilayn/scipy,befelix/scipy,tylerjereddy/scipy,vberaudi/scipy,pizzathief/scipy,mhogg/scipy,Shaswat27/scipy,vigna/scipy,perimosocordiae/scipy,aman-iitj/scipy,nmayorov/scipy,njwilson23/scipy,efiring/scipy,kalvdans/scipy,Stefan-Endres/scipy,gef756/scipy,anielsen001/scipy,matthewalbani/scipy,gdooper/scipy,mhogg/scipy,pizzathief/scipy,vanpact/scipy
|
ENH: Add basic tests for constants.
|
from numpy.testing import run_module_suite, assert_equal
import scipy.constants as sc
def test_fahrenheit_to_celcius():
assert_equal(sc.F2C(32), 0)
assert_equal(sc.F2C([32, 32]), [0, 0])
def test_celcius_to_kelvin():
assert_equal(sc.C2K([0, 0]), [273.15, 273.15])
def test_kelvin_to_celcius():
assert_equal(sc.K2C([0, 0]), [-273.15, -273.15])
def test_fahrenheit_to_kelvin():
assert_equal(sc.F2K([32, 32]), [273.15, 273.15])
def test_kelvin_to_fahrenheit():
assert_equal(sc.K2F([273.15, 273.15]), [32, 32])
def test_celcius_to_fahrenheit():
assert_equal(sc.C2F([0, 0]), [32, 32])
def test_lambda_to_nu():
assert_equal(sc.lambda2nu(sc.speed_of_light), 1)
def test_nu_to_lambda():
assert_equal(sc.nu2lambda(1), sc.speed_of_light)
if __name__ == "__main__":
run_module_suite()
|
<commit_before><commit_msg>ENH: Add basic tests for constants.<commit_after>
|
from numpy.testing import run_module_suite, assert_equal
import scipy.constants as sc
def test_fahrenheit_to_celcius():
assert_equal(sc.F2C(32), 0)
assert_equal(sc.F2C([32, 32]), [0, 0])
def test_celcius_to_kelvin():
assert_equal(sc.C2K([0, 0]), [273.15, 273.15])
def test_kelvin_to_celcius():
assert_equal(sc.K2C([0, 0]), [-273.15, -273.15])
def test_fahrenheit_to_kelvin():
assert_equal(sc.F2K([32, 32]), [273.15, 273.15])
def test_kelvin_to_fahrenheit():
assert_equal(sc.K2F([273.15, 273.15]), [32, 32])
def test_celcius_to_fahrenheit():
assert_equal(sc.C2F([0, 0]), [32, 32])
def test_lambda_to_nu():
assert_equal(sc.lambda2nu(sc.speed_of_light), 1)
def test_nu_to_lambda():
assert_equal(sc.nu2lambda(1), sc.speed_of_light)
if __name__ == "__main__":
run_module_suite()
|
ENH: Add basic tests for constants.from numpy.testing import run_module_suite, assert_equal
import scipy.constants as sc
def test_fahrenheit_to_celcius():
assert_equal(sc.F2C(32), 0)
assert_equal(sc.F2C([32, 32]), [0, 0])
def test_celcius_to_kelvin():
assert_equal(sc.C2K([0, 0]), [273.15, 273.15])
def test_kelvin_to_celcius():
assert_equal(sc.K2C([0, 0]), [-273.15, -273.15])
def test_fahrenheit_to_kelvin():
assert_equal(sc.F2K([32, 32]), [273.15, 273.15])
def test_kelvin_to_fahrenheit():
assert_equal(sc.K2F([273.15, 273.15]), [32, 32])
def test_celcius_to_fahrenheit():
assert_equal(sc.C2F([0, 0]), [32, 32])
def test_lambda_to_nu():
assert_equal(sc.lambda2nu(sc.speed_of_light), 1)
def test_nu_to_lambda():
assert_equal(sc.nu2lambda(1), sc.speed_of_light)
if __name__ == "__main__":
run_module_suite()
|
<commit_before><commit_msg>ENH: Add basic tests for constants.<commit_after>from numpy.testing import run_module_suite, assert_equal
import scipy.constants as sc
def test_fahrenheit_to_celcius():
assert_equal(sc.F2C(32), 0)
assert_equal(sc.F2C([32, 32]), [0, 0])
def test_celcius_to_kelvin():
assert_equal(sc.C2K([0, 0]), [273.15, 273.15])
def test_kelvin_to_celcius():
assert_equal(sc.K2C([0, 0]), [-273.15, -273.15])
def test_fahrenheit_to_kelvin():
assert_equal(sc.F2K([32, 32]), [273.15, 273.15])
def test_kelvin_to_fahrenheit():
assert_equal(sc.K2F([273.15, 273.15]), [32, 32])
def test_celcius_to_fahrenheit():
assert_equal(sc.C2F([0, 0]), [32, 32])
def test_lambda_to_nu():
assert_equal(sc.lambda2nu(sc.speed_of_light), 1)
def test_nu_to_lambda():
assert_equal(sc.nu2lambda(1), sc.speed_of_light)
if __name__ == "__main__":
run_module_suite()
|
|
d6b11eec630d3709d69e3b3878a7859b67cb0fd4
|
src/compare_meta_data.py
|
src/compare_meta_data.py
|
import argparse
import json
import re
from os.path import join
file_regex = re.compile("(^[a-z\-]*)")
results_log = "query_results.json"
def extract_arXiv_topic(filename):
return_topic = ""
matches = file_regex.match(filename).groups()
if len(matches) > 0:
return_topic = matches[0]
return return_topic
def gen_match_results(query_file, directory):
match_results = dict()
with open(query_file) as f:
json_data = f.read()
json_file = json.loads(json_data)
for corpus_name, data in json_file.iteritems():
match_results[corpus_name] = dict()
for query, results in data["queries"].iteritems():
match_results[corpus_name][query] = dict()
for result in results.itervalues():
topic = extract_arXiv_topic(result["file"])
if topic in match_results[corpus_name][query]:
match_results[corpus_name][query][topic] += 1
else:
match_results[corpus_name][query][topic] = 1
json.dump(match_results, open(join(directory, "meta_results.json"), 'w'))
def main():
parser = argparse.ArgumentParser(description='script to compare similarities generated by run sim')
parser.add_argument('file', help='input json file')
parser.add_argument('directory', help='output directory for json.')
args = parser.parse_args()
gen_match_results(args.file, args.directory)
if __name__ == "__main__":
main()
|
Add script to compare similarities with meta data
|
Add script to compare similarities with meta data
|
Python
|
mit
|
PinPinIre/Final-Year-Project,PinPinIre/Final-Year-Project,PinPinIre/Final-Year-Project
|
Add script to compare similarities with meta data
|
import argparse
import json
import re
from os.path import join
file_regex = re.compile("(^[a-z\-]*)")
results_log = "query_results.json"
def extract_arXiv_topic(filename):
return_topic = ""
matches = file_regex.match(filename).groups()
if len(matches) > 0:
return_topic = matches[0]
return return_topic
def gen_match_results(query_file, directory):
match_results = dict()
with open(query_file) as f:
json_data = f.read()
json_file = json.loads(json_data)
for corpus_name, data in json_file.iteritems():
match_results[corpus_name] = dict()
for query, results in data["queries"].iteritems():
match_results[corpus_name][query] = dict()
for result in results.itervalues():
topic = extract_arXiv_topic(result["file"])
if topic in match_results[corpus_name][query]:
match_results[corpus_name][query][topic] += 1
else:
match_results[corpus_name][query][topic] = 1
json.dump(match_results, open(join(directory, "meta_results.json"), 'w'))
def main():
parser = argparse.ArgumentParser(description='script to compare similarities generated by run sim')
parser.add_argument('file', help='input json file')
parser.add_argument('directory', help='output directory for json.')
args = parser.parse_args()
gen_match_results(args.file, args.directory)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to compare similarities with meta data<commit_after>
|
import argparse
import json
import re
from os.path import join
file_regex = re.compile("(^[a-z\-]*)")
results_log = "query_results.json"
def extract_arXiv_topic(filename):
return_topic = ""
matches = file_regex.match(filename).groups()
if len(matches) > 0:
return_topic = matches[0]
return return_topic
def gen_match_results(query_file, directory):
match_results = dict()
with open(query_file) as f:
json_data = f.read()
json_file = json.loads(json_data)
for corpus_name, data in json_file.iteritems():
match_results[corpus_name] = dict()
for query, results in data["queries"].iteritems():
match_results[corpus_name][query] = dict()
for result in results.itervalues():
topic = extract_arXiv_topic(result["file"])
if topic in match_results[corpus_name][query]:
match_results[corpus_name][query][topic] += 1
else:
match_results[corpus_name][query][topic] = 1
json.dump(match_results, open(join(directory, "meta_results.json"), 'w'))
def main():
parser = argparse.ArgumentParser(description='script to compare similarities generated by run sim')
parser.add_argument('file', help='input json file')
parser.add_argument('directory', help='output directory for json.')
args = parser.parse_args()
gen_match_results(args.file, args.directory)
if __name__ == "__main__":
main()
|
Add script to compare similarities with meta dataimport argparse
import json
import re
from os.path import join
file_regex = re.compile("(^[a-z\-]*)")
results_log = "query_results.json"
def extract_arXiv_topic(filename):
return_topic = ""
matches = file_regex.match(filename).groups()
if len(matches) > 0:
return_topic = matches[0]
return return_topic
def gen_match_results(query_file, directory):
match_results = dict()
with open(query_file) as f:
json_data = f.read()
json_file = json.loads(json_data)
for corpus_name, data in json_file.iteritems():
match_results[corpus_name] = dict()
for query, results in data["queries"].iteritems():
match_results[corpus_name][query] = dict()
for result in results.itervalues():
topic = extract_arXiv_topic(result["file"])
if topic in match_results[corpus_name][query]:
match_results[corpus_name][query][topic] += 1
else:
match_results[corpus_name][query][topic] = 1
json.dump(match_results, open(join(directory, "meta_results.json"), 'w'))
def main():
parser = argparse.ArgumentParser(description='script to compare similarities generated by run sim')
parser.add_argument('file', help='input json file')
parser.add_argument('directory', help='output directory for json.')
args = parser.parse_args()
gen_match_results(args.file, args.directory)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to compare similarities with meta data<commit_after>import argparse
import json
import re
from os.path import join
file_regex = re.compile("(^[a-z\-]*)")
results_log = "query_results.json"
def extract_arXiv_topic(filename):
return_topic = ""
matches = file_regex.match(filename).groups()
if len(matches) > 0:
return_topic = matches[0]
return return_topic
def gen_match_results(query_file, directory):
match_results = dict()
with open(query_file) as f:
json_data = f.read()
json_file = json.loads(json_data)
for corpus_name, data in json_file.iteritems():
match_results[corpus_name] = dict()
for query, results in data["queries"].iteritems():
match_results[corpus_name][query] = dict()
for result in results.itervalues():
topic = extract_arXiv_topic(result["file"])
if topic in match_results[corpus_name][query]:
match_results[corpus_name][query][topic] += 1
else:
match_results[corpus_name][query][topic] = 1
json.dump(match_results, open(join(directory, "meta_results.json"), 'w'))
def main():
parser = argparse.ArgumentParser(description='script to compare similarities generated by run sim')
parser.add_argument('file', help='input json file')
parser.add_argument('directory', help='output directory for json.')
args = parser.parse_args()
gen_match_results(args.file, args.directory)
if __name__ == "__main__":
main()
|
|
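Aside: the tally script above targets Python 2 (iteritems/itervalues). A minimal Python 3 sketch of the same topic count, using collections.Counter in place of the manual dict bookkeeping and hypothetical arXiv-style filenames in place of real query results:
import re
from collections import Counter
file_regex = re.compile(r"(^[a-z\-]*)")
def extract_topic(filename):
    # The leading run of lowercase letters and hyphens is the arXiv topic.
    match = file_regex.match(filename)
    return match.group(1) if match else ""
# Hypothetical result filenames standing in for a real query log.
files = ["astro-ph9901001.pdf", "astro-ph9901002.pdf", "hep-th9901003.pdf"]
print(Counter(extract_topic(f) for f in files))
# Counter({'astro-ph': 2, 'hep-th': 1})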
8df57b5241ba04ec8444546ed2356ae421796d23
|
django_vend/core/forms.py
|
django_vend/core/forms.py
|
import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
|
Create form field that can accept dates in the format used by the Vend API
|
Create form field that can accept dates in the format used by the Vend API
|
Python
|
bsd-3-clause
|
remarkablerocket/django-vend,remarkablerocket/django-vend
|
Create form field that can accept dates in the format used by the Vend API
|
import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
|
<commit_before><commit_msg>Create form field that can accept dates in the format used by the Vend API<commit_after>
|
import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
|
Create form field that can accept dates in the format used by the Vend APIimport re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
|
<commit_before><commit_msg>Create form field that can accept dates in the format used by the Vend API<commit_after>import re
from django import forms
from django.utils.dateparse import parse_datetime
from django.core.exceptions import ValidationError
def valid_date(date):
regex = ("^(?:[1-9]\d{3}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1\d|2[0-8])|(?:0[13"
"-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31)|(?:[1-9]\d(?:0[48]|["
"2468][048]|[13579][26])|(?:[2468][048]|[13579][26])00)-02-29)T(?:"
"[01]\d|2[0-3]):[0-5]\d:[0-5]\d(?:Z|[+-][01]\d:[0-5]\d)$")
return re.search(regex, date)
class VendDateTimeField(forms.DateTimeField):
def to_python(self, value):
if value not in self.empty_values and valid_date(value):
try:
value = parse_datetime(value)
except ValueError:
pass
return super(VendDateTimeField, self).to_python(value)
|
|
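For readers outside Django, the validate-then-parse flow above can be sketched with the standard library alone. This assumes the Vend API emits ISO-8601 timestamps of the shape the regex accepts; strptime's %z directive (Python 3.7+) handles both 'Z' and '+HH:MM' offsets:
from datetime import datetime
def parse_vend_datetime(value):
    # Returns an aware datetime, or None so normal validation can proceed.
    try:
        return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S%z")
    except ValueError:
        return None
print(parse_vend_datetime("2015-03-04T10:15:30Z"))  # 2015-03-04 10:15:30+00:00
print(parse_vend_datetime("not a date"))            # None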
cb88cdb82eb8032b2a83f006aa7c5e153cbbe8e2
|
python/smqtk/data_rep/data_set_impl/test_plugins.py
|
python/smqtk/data_rep/data_set_impl/test_plugins.py
|
from smqtk.data_rep.data_set_impl import get_data_set_impls
__author__ = 'purg'
def test_plugin_getter():
c = get_data_set_impls()
assert 'DataFileSet' in c
|
Add missing DataSet plugin test
|
Add missing DataSet plugin test
|
Python
|
bsd-3-clause
|
kfieldho/SMQTK,Purg/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,Purg/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,Purg/SMQTK,Purg/SMQTK,Purg/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,Purg/SMQTK,Purg/SMQTK,kfieldho/SMQTK,Purg/SMQTK,Purg/SMQTK
|
Add missing DataSet plugin test
|
from smqtk.data_rep.data_set_impl import get_data_set_impls
__author__ = 'purg'
def test_plugin_getter():
c = get_data_set_impls()
assert 'DataFileSet' in c
|
<commit_before><commit_msg>Add missing DataSet plugin test<commit_after>
|
from smqtk.data_rep.data_set_impl import get_data_set_impls
__author__ = 'purg'
def test_plugin_getter():
c = get_data_set_impls()
assert 'DataFileSet' in c
|
Add missing DataSet plugin test
from smqtk.data_rep.data_set_impl import get_data_set_impls
__author__ = 'purg'
def test_plugin_getter():
c = get_data_set_impls()
assert 'DataFileSet' in c
|
<commit_before><commit_msg>Add missing DataSet plugin test<commit_after>
from smqtk.data_rep.data_set_impl import get_data_set_impls
__author__ = 'purg'
def test_plugin_getter():
c = get_data_set_impls()
assert 'DataFileSet' in c
|
|
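The one-assertion test above guards a plugin registry: a getter returns a name-to-class mapping, and the test checks that a known default implementation is registered. A self-contained sketch of that pattern (names are illustrative, not SMQTK's actual internals):
def get_impls():
    # Stand-in for a real plugin-discovery function.
    class DataFileSet:
        pass
    return {cls.__name__: cls for cls in (DataFileSet,)}
def test_plugin_getter():
    assert 'DataFileSet' in get_impls()
test_plugin_getter()  # passes silently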
fa4f6bc0eae79aec45ee3da5bc70515bb363110c
|
misc/bonus_feat_dumper.py
|
misc/bonus_feat_dumper.py
|
#!/usr/bin/env python
import argparse
from pynwn.resource import ResourceManager
from pynwn.file.twoda import TwoDA
from pynwn.item import Item
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='0.1')
parser.add_argument('--nwn', help='NWN Path.',
default="C:\\GOG Games\\Neverwinter Nights Diamond Edition\\")
parser.add_argument('module', help='Module.')
args = parser.parse_args()
if __name__ == "__main__":
resman = ResourceManager.from_module(args.module, False, True, args.nwn)
feat = TwoDA(resman['feat.2da'])
res = {}
for tda in [TwoDA(t) for t in resman.glob('cls_feat_*.2da')]:
print(tda.co.resref, tda.co.io)
d = {}
for i in range(len(tda.rows)):
type = tda.get_int(i, 'List')
ls = ""
if type == 0:
ls = 'General Feat Only'
elif type == 1:
ls = 'General or Bonus Feat'
elif type == 2:
ls = 'Bonus Feat Only'
elif type == 3:
ls = 'Class Granted Feat'
strref = feat.get_int(tda.get_int(i, 'FeatIndex'), 'FEAT')
name = resman.tlktable.get(strref)
if type == 3:
name = "%s (%s)" % (name, tda.get(i, 'GrantedOnLevel'))
if ls in d:
d[ls].append(name)
else:
d[ls] = [name]
res[tda.co.resref] = d
for k, v in res.items():
print(k)
for ls in sorted(v.keys()):
if not len(v[ls]): continue
print(" * %s:\n * " % ls, end="")
print('\n * '.join(sorted(v[ls])))
|
Add little class bonus feat dumper.
|
Add little class bonus feat dumper.
|
Python
|
mit
|
jd28/pynwn-tools,jd28/pynwn-tools
|
Add little class bonus feat dumper.
|
#!/usr/bin/env python
import argparse
from pynwn.resource import ResourceManager
from pynwn.file.twoda import TwoDA
from pynwn.item import Item
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='0.1')
parser.add_argument('--nwn', help='NWN Path.',
default="C:\\GOG Games\\Neverwinter Nights Diamond Edition\\")
parser.add_argument('module', help='Module.')
args = parser.parse_args()
if __name__ == "__main__":
resman = ResourceManager.from_module(args.module, False, True, args.nwn)
feat = TwoDA(resman['feat.2da'])
res = {}
for tda in [TwoDA(t) for t in resman.glob('cls_feat_*.2da')]:
print(tda.co.resref, tda.co.io)
d = {}
for i in range(len(tda.rows)):
type = tda.get_int(i, 'List')
ls = ""
if type == 0:
ls = 'General Feat Only'
elif type == 1:
ls = 'General or Bonus Feat'
elif type == 2:
ls = 'Bonus Feat Only'
elif type == 3:
ls = 'Class Granted Feat'
strref = feat.get_int(tda.get_int(i, 'FeatIndex'), 'FEAT')
name = resman.tlktable.get(strref)
if type == 3:
name = "%s (%s)" % (name, tda.get(i, 'GrantedOnLevel'))
if ls in d:
d[ls].append(name)
else:
d[ls] = [name]
res[tda.co.resref] = d
for k, v in res.items():
print(k)
for ls in sorted(v.keys()):
if not len(v[ls]): continue
print(" * %s:\n * " % ls, end="")
print('\n * '.join(sorted(v[ls])))
|
<commit_before><commit_msg>Add little class bonus feat dumper.<commit_after>
|
#!/usr/bin/env python
import argparse
from pynwn.resource import ResourceManager
from pynwn.file.twoda import TwoDA
from pynwn.item import Item
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='0.1')
parser.add_argument('--nwn', help='NWN Path.',
default="C:\\GOG Games\\Neverwinter Nights Diamond Edition\\")
parser.add_argument('module', help='Module.')
args = parser.parse_args()
if __name__ == "__main__":
resman = ResourceManager.from_module(args.module, False, True, args.nwn)
feat = TwoDA(resman['feat.2da'])
res = {}
for tda in [TwoDA(t) for t in resman.glob('cls_feat_*.2da')]:
print(tda.co.resref, tda.co.io)
d = {}
for i in range(len(tda.rows)):
type = tda.get_int(i, 'List')
ls = ""
if type == 0:
ls = 'General Feat Only'
elif type == 1:
ls = 'General or Bonus Feat'
elif type == 2:
ls = 'Bonus Feat Only'
elif type == 3:
ls = 'Class Granted Feat'
strref = feat.get_int(tda.get_int(i, 'FeatIndex'), 'FEAT')
name = resman.tlktable.get(strref)
if type == 3:
name = "%s (%s)" % (name, tda.get(i, 'GrantedOnLevel'))
if ls in d:
d[ls].append(name)
else:
d[ls] = [name]
res[tda.co.resref] = d
for k, v in res.items():
print(k)
for ls in sorted(v.keys()):
if not len(v[ls]): continue
print(" * %s:\n * " % ls, end="")
print('\n * '.join(sorted(v[ls])))
|
Add little class bonus feat dumper.#!/usr/bin/env python
import argparse
from pynwn.resource import ResourceManager
from pynwn.file.twoda import TwoDA
from pynwn.item import Item
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='0.1')
parser.add_argument('--nwn', help='NWN Path.',
default="C:\\GOG Games\\Neverwinter Nights Diamond Edition\\")
parser.add_argument('module', help='Module.')
args = parser.parse_args()
if __name__ == "__main__":
resman = ResourceManager.from_module(args.module, False, True, args.nwn)
feat = TwoDA(resman['feat.2da'])
res = {}
for tda in [TwoDA(t) for t in resman.glob('cls_feat_*.2da')]:
print(tda.co.resref, tda.co.io)
d = {}
for i in range(len(tda.rows)):
type = tda.get_int(i, 'List')
ls = ""
if type == 0:
ls = 'General Feat Only'
elif type == 1:
ls = 'General or Bonus Feat'
elif type == 2:
ls = 'Bonus Feat Only'
elif type == 3:
ls = 'Class Granted Feat'
strref = feat.get_int(tda.get_int(i, 'FeatIndex'), 'FEAT')
name = resman.tlktable.get(strref)
if type == 3:
name = "%s (%s)" % (name, tda.get(i, 'GrantedOnLevel'))
if ls in d:
d[ls].append(name)
else:
d[ls] = [name]
res[tda.co.resref] = d
for k, v in res.items():
print(k)
for ls in sorted(v.keys()):
if not len(v[ls]): continue
print(" * %s:\n * " % ls, end="")
print('\n * '.join(sorted(v[ls])))
|
<commit_before><commit_msg>Add little class bonus feat dumper.<commit_after>#!/usr/bin/env python
import argparse
from pynwn.resource import ResourceManager
from pynwn.file.twoda import TwoDA
from pynwn.item import Item
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='0.1')
parser.add_argument('--nwn', help='NWN Path.',
default="C:\\GOG Games\\Neverwinter Nights Diamond Edition\\")
parser.add_argument('module', help='Module.')
args = parser.parse_args()
if __name__ == "__main__":
resman = ResourceManager.from_module(args.module, False, True, args.nwn)
feat = TwoDA(resman['feat.2da'])
res = {}
for tda in [TwoDA(t) for t in resman.glob('cls_feat_*.2da')]:
print(tda.co.resref, tda.co.io)
d = {}
for i in range(len(tda.rows)):
type = tda.get_int(i, 'List')
ls = ""
if type == 0:
ls = 'General Feat Only'
elif type == 1:
ls = 'General or Bonus Feat'
elif type == 2:
ls = 'Bonus Feat Only'
elif type == 3:
ls = 'Class Granted Feat'
strref = feat.get_int(tda.get_int(i, 'FeatIndex'), 'FEAT')
name = resman.tlktable.get(strref)
if type == 3:
name = "%s (%s)" % (name, tda.get(i, 'GrantedOnLevel'))
if ls in d:
d[ls].append(name)
else:
d[ls] = [name]
res[tda.co.resref] = d
for k, v in res.items():
print(k)
for ls in sorted(v.keys()):
if not len(v[ls]): continue
print(" * %s:\n * " % ls, end="")
print('\n * '.join(sorted(v[ls])))
|
|
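The if/elif chain decoding the List column above could also be table-driven, which keeps the code-to-label mapping for the 2da data in one place. A sketch assuming the same integer codes, with unknown codes falling back to an empty string:
LIST_TYPES = {
    0: 'General Feat Only',
    1: 'General or Bonus Feat',
    2: 'Bonus Feat Only',
    3: 'Class Granted Feat',
}
def list_label(code):
    return LIST_TYPES.get(code, '')
for code in (0, 1, 2, 3, 99):
    print(code, '->', repr(list_label(code)))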
e98452df23fd680be0b4b9729d1c7f80d0686a0e
|
examples/open-existing.py
|
examples/open-existing.py
|
#!/usr/bin/python
# This is a simple example program to show how to use PyIso to open up an
# existing ISO passed on the command-line, and print out all of the file names
# at the root of the ISO.
# Import standard python modules.
import sys
# Import pyiso itself.
import pyiso
# Check that there are enough command-line arguments.
if len(sys.argv) != 2:
print("Usage: %s <iso>" % (sys.argv[0]))
sys.exit(1)
# Open up the passed-in file. Note that this file object *must* remain open
# for the lifetime of the PyIso object, since PyIso will use this file object
# for internal use.
fp = open(sys.argv[1], 'r')
# Create a new PyIso object.
iso = pyiso.PyIso()
# Open up a file object. This causes PyIso to parse all of the metadata on the
# ISO, which is used for later manipulation.
iso.open(fp)
# Now iterate through each of the files on the root of the ISO, printing out
# their names.
for child in iso.list_dir('/'):
print(child.file_identifier())
# Close the ISO object. After this call, the PyIso object has forgotten
# everything about the previous ISO, and can be re-used.
iso.close()
# Now that we are done with all of our manipulations, we can close the file
# object.
fp.close()
|
Add a well-documented example for opening an existing ISO.
|
Add a well-documented example for opening an existing ISO.
Signed-off-by: Chris Lalancette <281cd07d7578d97c83271fbbf2faddb83ab3791c@gmail.com>
|
Python
|
lgpl-2.1
|
clalancette/pycdlib,clalancette/pyiso
|
Add a well-documented example for opening an existing ISO.
Signed-off-by: Chris Lalancette <281cd07d7578d97c83271fbbf2faddb83ab3791c@gmail.com>
|
#!/usr/bin/python
# This is a simple example program to show how to use PyIso to open up an
# existing ISO passed on the command-line, and print out all of the file names
# at the root of the ISO.
# Import standard python modules.
import sys
# Import pyiso itself.
import pyiso
# Check that there are enough command-line arguments.
if len(sys.argv) != 2:
print("Usage: %s <iso>" % (sys.argv[0]))
sys.exit(1)
# Open up the passed-in file. Note that this file object *must* remain open
# for the lifetime of the PyIso object, since PyIso will use this file object
# for internal use.
fp = open(sys.argv[1], 'r')
# Create a new PyIso object.
iso = pyiso.PyIso()
# Open up a file object. This causes PyIso to parse all of the metadata on the
# ISO, which is used for later manipulation.
iso.open(fp)
# Now iterate through each of the files on the root of the ISO, printing out
# their names.
for child in iso.list_dir('/'):
print(child.file_identifier())
# Close the ISO object. After this call, the PyIso object has forgotten
# everything about the previous ISO, and can be re-used.
iso.close()
# Now that we are done with all of our manipulations, we can close the file
# object.
fp.close()
|
<commit_before><commit_msg>Add a well-documented example for opening an existing ISO.
Signed-off-by: Chris Lalancette <281cd07d7578d97c83271fbbf2faddb83ab3791c@gmail.com><commit_after>
|
#!/usr/bin/python
# This is a simple example program to show how to use PyIso to open up an
# existing ISO passed on the command-line, and print out all of the file names
# at the root of the ISO.
# Import standard python modules.
import sys
# Import pyiso itself.
import pyiso
# Check that there are enough command-line arguments.
if len(sys.argv) != 2:
print("Usage: %s <iso>" % (sys.argv[0]))
sys.exit(1)
# Open up the passed-in file. Note that this file object *must* remain open
# for the lifetime of the PyIso object, since PyIso will use this file object
# for internal use.
fp = open(sys.argv[1], 'r')
# Create a new PyIso object.
iso = pyiso.PyIso()
# Open up a file object. This causes PyIso to parse all of the metadata on the
# ISO, which is used for later manipulation.
iso.open(fp)
# Now iterate through each of the files on the root of the ISO, printing out
# their names.
for child in iso.list_dir('/'):
print(child.file_identifier())
# Close the ISO object. After this call, the PyIso object has forgotten
# everything about the previous ISO, and can be re-used.
iso.close()
# Now that we are done with all of our manipulations, we can close the file
# object.
fp.close()
|
Add a well-documented example for opening an existing ISO.
Signed-off-by: Chris Lalancette <281cd07d7578d97c83271fbbf2faddb83ab3791c@gmail.com>#!/usr/bin/python
# This is a simple example program to show how to use PyIso to open up an
# existing ISO passed on the command-line, and print out all of the file names
# at the root of the ISO.
# Import standard python modules.
import sys
# Import pyiso itself.
import pyiso
# Check that there are enough command-line arguments.
if len(sys.argv) != 2:
print("Usage: %s <iso>" % (sys.argv[0]))
sys.exit(1)
# Open up the passed-in file. Note that this file object *must* remain open
# for the lifetime of the PyIso object, since PyIso will use this file object
# for internal use.
fp = open(sys.argv[1], 'r')
# Create a new PyIso object.
iso = pyiso.PyIso()
# Open up a file object. This causes PyIso to parse all of the metadata on the
# ISO, which is used for later manipulation.
iso.open(fp)
# Now iterate through each of the files on the root of the ISO, printing out
# their names.
for child in iso.list_dir('/'):
print(child.file_identifier())
# Close the ISO object. After this call, the PyIso object has forgotten
# everything about the previous ISO, and can be re-used.
iso.close()
# Now that we are done with all of our manipulations, we can close the file
# object.
fp.close()
|
<commit_before><commit_msg>Add a well-documented example for opening an existing ISO.
Signed-off-by: Chris Lalancette <281cd07d7578d97c83271fbbf2faddb83ab3791c@gmail.com><commit_after>#!/usr/bin/python
# This is a simple example program to show how to use PyIso to open up an
# existing ISO passed on the command-line, and print out all of the file names
# at the root of the ISO.
# Import standard python modules.
import sys
# Import pyiso itself.
import pyiso
# Check that there are enough command-line arguments.
if len(sys.argv) != 2:
print("Usage: %s <iso>" % (sys.argv[0]))
sys.exit(1)
# Open up the passed-in file. Note that this file object *must* remain open
# for the lifetime of the PyIso object, since PyIso will use this file object
# for internal use.
fp = open(sys.argv[1], 'r')
# Create a new PyIso object.
iso = pyiso.PyIso()
# Open up a file object. This causes PyIso to parse all of the metadata on the
# ISO, which is used for later manipulation.
iso.open(fp)
# Now iterate through each of the files on the root of the ISO, printing out
# their names.
for child in iso.list_dir('/'):
print(child.file_identifier())
# Close the ISO object. After this call, the PyIso object has forgotten
# everything about the previous ISO, and can be re-used.
iso.close()
# Now that we are done with all of our manipulations, we can close the file
# object.
fp.close()
|
|
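A variant of the example above using a context manager, with the same pyiso calls the script demonstrates. Opening the image in binary mode ('rb') is the cautious choice for ISO data, though the original uses 'r'; treat this as a sketch, not the library's documented idiom:
import sys
import pyiso
def list_root(path):
    with open(path, 'rb') as fp:
        iso = pyiso.PyIso()
        iso.open(fp)                    # fp must stay open while iso is in use
        for child in iso.list_dir('/'):
            print(child.file_identifier())
        iso.close()                     # release the ISO before fp closes
if __name__ == '__main__':
    list_root(sys.argv[1])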
6ba590092846ca240629091df2f064b389d15eca
|
babybuddy/migrations/0002_add_settings.py
|
babybuddy/migrations/0002_add_settings.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_settings(apps, schema_editor):
Settings = apps.get_model('babybuddy', 'Settings')
User = apps.get_model('auth', 'User')
for user in User.objects.all():
if Settings.objects.filter(user=user).count() == 0:
settings = Settings.objects.create(user=user)
settings.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
('babybuddy', '0001_initial'),
]
operations = [
migrations.RunPython(add_settings, reverse_code=migrations.RunPython.noop),
]
|
Add migration to handle existing users when adding the Settings model.
|
Add migration to handle existing users when adding the Settings model.
|
Python
|
bsd-2-clause
|
cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy
|
Add migration to handle existing users when adding the Settings model.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_settings(apps, schema_editor):
Settings = apps.get_model('babybuddy', 'Settings')
User = apps.get_model('auth', 'User')
for user in User.objects.all():
if Settings.objects.filter(user=user).count() == 0:
settings = Settings.objects.create(user=user)
settings.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
('babybuddy', '0001_initial'),
]
operations = [
migrations.RunPython(add_settings, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add migration to handle existing users when adding the Settings model.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_settings(apps, schema_editor):
Settings = apps.get_model('babybuddy', 'Settings')
User = apps.get_model('auth', 'User')
for user in User.objects.all():
if Settings.objects.filter(user=user).count() == 0:
settings = Settings.objects.create(user=user)
settings.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
('babybuddy', '0001_initial'),
]
operations = [
migrations.RunPython(add_settings, reverse_code=migrations.RunPython.noop),
]
|
Add migration to handle existing users when adding the Settings model.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_settings(apps, schema_editor):
Settings = apps.get_model('babybuddy', 'Settings')
User = apps.get_model('auth', 'User')
for user in User.objects.all():
if Settings.objects.filter(user=user).count() == 0:
settings = Settings.objects.create(user=user)
settings.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
('babybuddy', '0001_initial'),
]
operations = [
migrations.RunPython(add_settings, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add migration to handle existing users when adding the Settings model.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_settings(apps, schema_editor):
Settings = apps.get_model('babybuddy', 'Settings')
User = apps.get_model('auth', 'User')
for user in User.objects.all():
if Settings.objects.filter(user=user).count() == 0:
settings = Settings.objects.create(user=user)
settings.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
('babybuddy', '0001_initial'),
]
operations = [
migrations.RunPython(add_settings, reverse_code=migrations.RunPython.noop),
]
|
|
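The backfill loop above can be written more tersely with get_or_create, which makes the one-Settings-row-per-user intent explicit and drops the manual count check. A sketch of just the RunPython callable against the same historical models; it only runs inside a Migration like the one shown above:
def add_settings(apps, schema_editor):
    Settings = apps.get_model('babybuddy', 'Settings')
    User = apps.get_model('auth', 'User')
    for user in User.objects.all():
        # Creates and saves a row only when one is missing.
        Settings.objects.get_or_create(user=user)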
9fb8b2bc70a34c9f91c652eb5582b3d894a069d6
|
util/fetch_archive.py
|
util/fetch_archive.py
|
#!/usr/bin/python3
import argparse
import os
import requests
import shutil
import subprocess
parser = argparse.ArgumentParser(description='iconograph fetch_archive')
parser.add_argument(
'--dest-dir',
dest='dest_dir',
action='store',
default='.')
parser.add_argument(
'--https-ca-cert',
dest='https_ca_cert',
action='store')
parser.add_argument(
'--https-client-cert',
dest='https_client_cert',
action='store')
parser.add_argument(
'--https-client-key',
dest='https_client_key',
action='store')
parser.add_argument(
'--url',
dest='url',
action='store',
required=True)
FLAGS = parser.parse_args()
class ArchiveFetcher(object):
_BUF_SIZE = 2 ** 16
def __init__(self, https_ca_cert, https_client_cert, https_client_key):
self._session = requests.Session()
if https_ca_cert:
self._session.verify = https_ca_cert
if https_client_cert and https_client_key:
self._session.cert = (https_client_cert, https_client_key)
def Fetch(self, url, dest_dir='.'):
resp = self._session.get(url, stream=True)
tar = subprocess.Popen(
['tar', '--extract', '--verbose'],
stdin=subprocess.PIPE,
cwd=dest_dir)
for data in resp.iter_content(self._BUF_SIZE):
tar.stdin.write(data)
tar.wait()
def main():
fetcher = ArchiveFetcher(
FLAGS.https_ca_cert,
FLAGS.https_client_cert,
FLAGS.https_client_key)
fetcher.Fetch(
FLAGS.url,
FLAGS.dest_dir)
if __name__ == '__main__':
main()
|
Add utility to fetch and unpack tarballs securely from a server.
|
Add utility to fetch and unpack tarballs securely from a server.
|
Python
|
apache-2.0
|
robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph,robot-tools/iconograph
|
Add utility to fetch and unpack tarballs securely from a server.
|
#!/usr/bin/python3
import argparse
import os
import requests
import shutil
import subprocess
parser = argparse.ArgumentParser(description='iconograph fetch_archive')
parser.add_argument(
'--dest-dir',
dest='dest_dir',
action='store',
default='.')
parser.add_argument(
'--https-ca-cert',
dest='https_ca_cert',
action='store')
parser.add_argument(
'--https-client-cert',
dest='https_client_cert',
action='store')
parser.add_argument(
'--https-client-key',
dest='https_client_key',
action='store')
parser.add_argument(
'--url',
dest='url',
action='store',
required=True)
FLAGS = parser.parse_args()
class ArchiveFetcher(object):
_BUF_SIZE = 2 ** 16
def __init__(self, https_ca_cert, https_client_cert, https_client_key):
self._session = requests.Session()
if https_ca_cert:
self._session.verify = https_ca_cert
if https_client_cert and https_client_key:
self._session.cert = (https_client_cert, https_client_key)
def Fetch(self, url, dest_dir='.'):
resp = self._session.get(url, stream=True)
tar = subprocess.Popen(
['tar', '--extract', '--verbose'],
stdin=subprocess.PIPE,
cwd=dest_dir)
for data in resp.iter_content(self._BUF_SIZE):
tar.stdin.write(data)
tar.wait()
def main():
fetcher = ArchiveFetcher(
FLAGS.https_ca_cert,
FLAGS.https_client_cert,
FLAGS.https_client_key)
fetcher.Fetch(
FLAGS.url,
FLAGS.dest_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add utility to fetch and unpack tarballs securely from a server.<commit_after>
|
#!/usr/bin/python3
import argparse
import os
import requests
import shutil
import subprocess
parser = argparse.ArgumentParser(description='iconograph fetch_archive')
parser.add_argument(
'--dest-dir',
dest='dest_dir',
action='store',
default='.')
parser.add_argument(
'--https-ca-cert',
dest='https_ca_cert',
action='store')
parser.add_argument(
'--https-client-cert',
dest='https_client_cert',
action='store')
parser.add_argument(
'--https-client-key',
dest='https_client_key',
action='store')
parser.add_argument(
'--url',
dest='url',
action='store',
required=True)
FLAGS = parser.parse_args()
class ArchiveFetcher(object):
_BUF_SIZE = 2 ** 16
def __init__(self, https_ca_cert, https_client_cert, https_client_key):
self._session = requests.Session()
if https_ca_cert:
self._session.verify = https_ca_cert
if https_client_cert and https_client_key:
self._session.cert = (https_client_cert, https_client_key)
def Fetch(self, url, dest_dir='.'):
resp = self._session.get(url, stream=True)
tar = subprocess.Popen(
['tar', '--extract', '--verbose'],
stdin=subprocess.PIPE,
cwd=dest_dir)
for data in resp.iter_content(self._BUF_SIZE):
tar.stdin.write(data)
tar.wait()
def main():
fetcher = ArchiveFetcher(
FLAGS.https_ca_cert,
FLAGS.https_client_cert,
FLAGS.https_client_key)
fetcher.Fetch(
FLAGS.url,
FLAGS.dest_dir)
if __name__ == '__main__':
main()
|
Add utility to fetch and unpack tarballs securely from a server.#!/usr/bin/python3
import argparse
import os
import requests
import shutil
import subprocess
parser = argparse.ArgumentParser(description='iconograph fetch_archive')
parser.add_argument(
'--dest-dir',
dest='dest_dir',
action='store',
default='.')
parser.add_argument(
'--https-ca-cert',
dest='https_ca_cert',
action='store')
parser.add_argument(
'--https-client-cert',
dest='https_client_cert',
action='store')
parser.add_argument(
'--https-client-key',
dest='https_client_key',
action='store')
parser.add_argument(
'--url',
dest='url',
action='store',
required=True)
FLAGS = parser.parse_args()
class ArchiveFetcher(object):
_BUF_SIZE = 2 ** 16
def __init__(self, https_ca_cert, https_client_cert, https_client_key):
self._session = requests.Session()
if https_ca_cert:
self._session.verify = https_ca_cert
if https_client_cert and https_client_key:
self._session.cert = (https_client_cert, https_client_key)
def Fetch(self, url, dest_dir='.'):
resp = self._session.get(url, stream=True)
tar = subprocess.Popen(
['tar', '--extract', '--verbose'],
stdin=subprocess.PIPE,
cwd=dest_dir)
for data in resp.iter_content(self._BUF_SIZE):
tar.stdin.write(data)
tar.wait()
def main():
fetcher = ArchiveFetcher(
FLAGS.https_ca_cert,
FLAGS.https_client_cert,
FLAGS.https_client_key)
fetcher.Fetch(
FLAGS.url,
FLAGS.dest_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add utility to fetch and unpack tarballs securely from a server.<commit_after>#!/usr/bin/python3
import argparse
import os
import requests
import shutil
import subprocess
parser = argparse.ArgumentParser(description='iconograph fetch_archive')
parser.add_argument(
'--dest-dir',
dest='dest_dir',
action='store',
default='.')
parser.add_argument(
'--https-ca-cert',
dest='https_ca_cert',
action='store')
parser.add_argument(
'--https-client-cert',
dest='https_client_cert',
action='store')
parser.add_argument(
'--https-client-key',
dest='https_client_key',
action='store')
parser.add_argument(
'--url',
dest='url',
action='store',
required=True)
FLAGS = parser.parse_args()
class ArchiveFetcher(object):
_BUF_SIZE = 2 ** 16
def __init__(self, https_ca_cert, https_client_cert, https_client_key):
self._session = requests.Session()
if https_ca_cert:
self._session.verify = https_ca_cert
if https_client_cert and https_client_key:
self._session.cert = (https_client_cert, https_client_key)
def Fetch(self, url, dest_dir='.'):
resp = self._session.get(url, stream=True)
tar = subprocess.Popen(
['tar', '--extract', '--verbose'],
stdin=subprocess.PIPE,
cwd=dest_dir)
for data in resp.iter_content(self._BUF_SIZE):
tar.stdin.write(data)
tar.wait()
def main():
fetcher = ArchiveFetcher(
FLAGS.https_ca_cert,
FLAGS.https_client_cert,
FLAGS.https_client_key)
fetcher.Fetch(
FLAGS.url,
FLAGS.dest_dir)
if __name__ == '__main__':
main()
|
|
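One caveat worth noting about Fetch() above: tar reads its stdin until EOF, so tar.wait() can block forever unless the pipe is closed first. A sketch of the streaming loop with that fix, using the same tar invocation; chunks stands in for resp.iter_content(...):
import subprocess
def stream_to_tar(chunks, dest_dir='.'):
    tar = subprocess.Popen(
        ['tar', '--extract', '--verbose'],
        stdin=subprocess.PIPE,
        cwd=dest_dir)
    for data in chunks:
        tar.stdin.write(data)
    tar.stdin.close()  # signal EOF so tar can finish and exit
    return tar.wait()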
6db79795e3839003965686f8ef73d1937aba1fba
|
scripts/consistency/fix_duplicate_addons.py
|
scripts/consistency/fix_duplicate_addons.py
|
"""Fixes nodes with two copies of the files and wiki addons attached.
This script must be run from the OSF root directory for the imports to work.
"""
from pymongo import MongoClient
from website.app import init_app
from website.project.model import Node
from website.addons.wiki.model import AddonWikiNodeSettings
from website.addons.osffiles.model import AddonFilesNodeSettings
app = init_app()
from website.settings import DB_USER, DB_PASS
client = MongoClient('localhost', 20771)
client.osf20130903.authenticate(DB_USER, DB_PASS)
db = client.osf20130903
for addon_class in (AddonWikiNodeSettings, AddonFilesNodeSettings):
print('Processing ' + addon_class.__name__)
query = db['node'].find({
'.'.join(
('__backrefs',
'addons',
addon_class.__name__.lower(),
'owner'
)
): {'$size': 2}
})
for node in (Node.load(node['_id']) for node in query):
print('- ' + node._id)
keep, discard = [x for x in node.addons if isinstance(x, addon_class)]
addon_class.remove_one(discard)
print('')
print('-----\nDone.')
|
Add script to fix file and wiki migration bug
|
Add script to fix file and wiki migration bug
|
Python
|
apache-2.0
|
sloria/osf.io,felliott/osf.io,chrisseto/osf.io,mattclark/osf.io,icereval/osf.io,samanehsan/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,reinaH/osf.io,erinspace/osf.io,abought/osf.io,baylee-d/osf.io,dplorimer/osf,danielneis/osf.io,jinluyuan/osf.io,rdhyee/osf.io,chrisseto/osf.io,jolene-esposito/osf.io,CenterForOpenScience/osf.io,jeffreyliu3230/osf.io,Ghalko/osf.io,TomBaxter/osf.io,abought/osf.io,leb2dg/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,GaryKriebel/osf.io,cosenal/osf.io,asanfilippo7/osf.io,jeffreyliu3230/osf.io,amyshi188/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,chennan47/osf.io,RomanZWang/osf.io,kwierman/osf.io,hmoco/osf.io,mfraezz/osf.io,ckc6cz/osf.io,amyshi188/osf.io,alexschiller/osf.io,mattclark/osf.io,zamattiac/osf.io,revanthkolli/osf.io,zkraime/osf.io,adlius/osf.io,jinluyuan/osf.io,billyhunt/osf.io,doublebits/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,dplorimer/osf,MerlinZhang/osf.io,HarryRybacki/osf.io,caseyrygt/osf.io,Ghalko/osf.io,zamattiac/osf.io,cwisecarver/osf.io,crcresearch/osf.io,sloria/osf.io,leb2dg/osf.io,doublebits/osf.io,cldershem/osf.io,bdyetton/prettychart,ZobairAlijan/osf.io,cslzchen/osf.io,TomBaxter/osf.io,doublebits/osf.io,GageGaskins/osf.io,zamattiac/osf.io,felliott/osf.io,alexschiller/osf.io,chennan47/osf.io,jnayak1/osf.io,rdhyee/osf.io,cosenal/osf.io,dplorimer/osf,asanfilippo7/osf.io,cslzchen/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,fabianvf/osf.io,abought/osf.io,icereval/osf.io,amyshi188/osf.io,jnayak1/osf.io,emetsger/osf.io,fabianvf/osf.io,TomBaxter/osf.io,alexschiller/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,arpitar/osf.io,cslzchen/osf.io,njantrania/osf.io,jeffreyliu3230/osf.io,asanfilippo7/osf.io,mluke93/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,barbour-em/osf.io,ticklemepierce/osf.io,chrisseto/osf.io,lyndsysimon/osf.io,Johnetordoff/osf.io,GaryKriebel/osf.io,jolene-esposito/osf.io,samchrisinger/osf.io,mluo613/osf.io,njantrania/osf.io,mluke93/osf.io,billyhunt/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,zkraime/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,zkraime/osf.io,himanshuo/osf.io,doublebits/osf.io,amyshi188/osf.io,jeffreyliu3230/osf.io,njantrania/osf.io,lamdnhan/osf.io,kushG/osf.io,TomHeatwole/osf.io,cldershem/osf.io,cwisecarver/osf.io,revanthkolli/osf.io,himanshuo/osf.io,revanthkolli/osf.io,laurenrevere/osf.io,acshi/osf.io,HarryRybacki/osf.io,reinaH/osf.io,arpitar/osf.io,caseyrygt/osf.io,mluo613/osf.io,sbt9uc/osf.io,baylee-d/osf.io,dplorimer/osf,acshi/osf.io,ticklemepierce/osf.io,samchrisinger/osf.io,caneruguz/osf.io,abought/osf.io,sbt9uc/osf.io,mfraezz/osf.io,wearpants/osf.io,SSJohns/osf.io,adlius/osf.io,GaryKriebel/osf.io,RomanZWang/osf.io,fabianvf/osf.io,jmcarp/osf.io,adlius/osf.io,mluke93/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,sloria/osf.io,mluo613/osf.io,crcresearch/osf.io,samchrisinger/osf.io,lamdnhan/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,AndrewSallans/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,alexschiller/osf.io,samanehsan/osf.io,ckc6cz/osf.io,caseyrygt/osf.io,chrisseto/osf.io,mfraezz/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,kushG/osf.io,ticklemepierce/osf.io,HarryRybacki/osf.io,billyhunt/osf.io,njantrania/osf.io,sbt9uc/osf.io,brandonPurvis/osf.io,pattisdr/osf.io,mattclark/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,wearpants/osf.io,danielneis/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,ZobairAlijan/osf.io,baylee-d/osf.io,sbt9uc/osf.io,rdhyee/osf.i
o,aaxelb/osf.io,wearpants/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,jinluyuan/osf.io,barbour-em/osf.io,Ghalko/osf.io,MerlinZhang/osf.io,GaryKriebel/osf.io,samchrisinger/osf.io,mluke93/osf.io,zkraime/osf.io,lamdnhan/osf.io,Johnetordoff/osf.io,KAsante95/osf.io,erinspace/osf.io,HarryRybacki/osf.io,wearpants/osf.io,chennan47/osf.io,cosenal/osf.io,bdyetton/prettychart,HalcyonChimera/osf.io,lyndsysimon/osf.io,pattisdr/osf.io,mluo613/osf.io,TomHeatwole/osf.io,MerlinZhang/osf.io,crcresearch/osf.io,SSJohns/osf.io,zamattiac/osf.io,kwierman/osf.io,ZobairAlijan/osf.io,KAsante95/osf.io,lamdnhan/osf.io,jmcarp/osf.io,alexschiller/osf.io,lyndsysimon/osf.io,leb2dg/osf.io,kushG/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,felliott/osf.io,SSJohns/osf.io,bdyetton/prettychart,GageGaskins/osf.io,Johnetordoff/osf.io,himanshuo/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,Ghalko/osf.io,brianjgeiger/osf.io,reinaH/osf.io,ckc6cz/osf.io,lyndsysimon/osf.io,doublebits/osf.io,emetsger/osf.io,monikagrabowska/osf.io,jmcarp/osf.io,GageGaskins/osf.io,billyhunt/osf.io,petermalcolm/osf.io,acshi/osf.io,AndrewSallans/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,caseyrollins/osf.io,DanielSBrown/osf.io,reinaH/osf.io,leb2dg/osf.io,cwisecarver/osf.io,revanthkolli/osf.io,icereval/osf.io,kch8qx/osf.io,petermalcolm/osf.io,RomanZWang/osf.io,ckc6cz/osf.io,jinluyuan/osf.io,jolene-esposito/osf.io,jmcarp/osf.io,petermalcolm/osf.io,binoculars/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,petermalcolm/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,MerlinZhang/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,hmoco/osf.io,emetsger/osf.io,cldershem/osf.io,binoculars/osf.io,barbour-em/osf.io,fabianvf/osf.io,zachjanicki/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,cldershem/osf.io,billyhunt/osf.io,samanehsan/osf.io,samanehsan/osf.io,aaxelb/osf.io,jolene-esposito/osf.io,kwierman/osf.io,caneruguz/osf.io,GageGaskins/osf.io,erinspace/osf.io,adlius/osf.io,kch8qx/osf.io,arpitar/osf.io,Nesiehr/osf.io,jnayak1/osf.io,himanshuo/osf.io,bdyetton/prettychart,acshi/osf.io,haoyuchen1992/osf.io,barbour-em/osf.io,danielneis/osf.io,cosenal/osf.io,kushG/osf.io,aaxelb/osf.io,felliott/osf.io,KAsante95/osf.io,mluo613/osf.io,arpitar/osf.io,caseyrollins/osf.io,zachjanicki/osf.io,binoculars/osf.io,Nesiehr/osf.io,emetsger/osf.io,hmoco/osf.io,jnayak1/osf.io,kch8qx/osf.io
|
Add script to fix file and wiki migration bug
|
"""Fixes nodes with two copies of the files and wiki addons attached.
This script must be run from the OSF root directory for the imports to work.
"""
from pymongo import MongoClient
from website.app import init_app
from website.project.model import Node
from website.addons.wiki.model import AddonWikiNodeSettings
from website.addons.osffiles.model import AddonFilesNodeSettings
app = init_app()
from website.settings import DB_USER, DB_PASS
client = MongoClient('localhost', 20771)
client.osf20130903.authenticate(DB_USER, DB_PASS)
db = client.osf20130903
for addon_class in (AddonWikiNodeSettings, AddonFilesNodeSettings):
print('Processing ' + addon_class.__name__)
query = db['node'].find({
'.'.join(
('__backrefs',
'addons',
addon_class.__name__.lower(),
'owner'
)
): {'$size': 2}
})
for node in (Node.load(node['_id']) for node in query):
print('- ' + node._id)
keep, discard = [x for x in node.addons if isinstance(x, addon_class)]
addon_class.remove_one(discard)
print('')
print('-----\nDone.')
|
<commit_before><commit_msg>Add script to fix file and wiki migration bug<commit_after>
|
"""Fixes nodes with two copies of the files and wiki addons attached.
This script must be run from the OSF root directory for the imports to work.
"""
from pymongo import MongoClient
from website.app import init_app
from website.project.model import Node
from website.addons.wiki.model import AddonWikiNodeSettings
from website.addons.osffiles.model import AddonFilesNodeSettings
app = init_app()
from website.settings import DB_USER, DB_PASS
client = MongoClient('localhost', 20771)
client.osf20130903.authenticate(DB_USER, DB_PASS)
db = client.osf20130903
for addon_class in (AddonWikiNodeSettings, AddonFilesNodeSettings):
print('Processing ' + addon_class.__name__)
query = db['node'].find({
'.'.join(
('__backrefs',
'addons',
addon_class.__name__.lower(),
'owner'
)
): {'$size': 2}
})
for node in (Node.load(node['_id']) for node in query):
print('- ' + node._id)
keep, discard = [x for x in node.addons if isinstance(x, addon_class)]
addon_class.remove_one(discard)
print('')
print('-----\nDone.')
|
Add script to fix file and wiki migration bug"""Fixes nodes with two copies of the files and wiki addons attached.
This script must be run from the OSF root directory for the imports to work.
"""
from pymongo import MongoClient
from website.app import init_app
from website.project.model import Node
from website.addons.wiki.model import AddonWikiNodeSettings
from website.addons.osffiles.model import AddonFilesNodeSettings
app = init_app()
from website.settings import DB_USER, DB_PASS
client = MongoClient('localhost', 20771)
client.osf20130903.authenticate(DB_USER, DB_PASS)
db = client.osf20130903
for addon_class in (AddonWikiNodeSettings, AddonFilesNodeSettings):
print('Processing ' + addon_class.__name__)
query = db['node'].find({
'.'.join(
('__backrefs',
'addons',
addon_class.__name__.lower(),
'owner'
)
): {'$size': 2}
})
for node in (Node.load(node['_id']) for node in query):
print('- ' + node._id)
keep, discard = [x for x in node.addons if isinstance(x, addon_class)]
addon_class.remove_one(discard)
print('')
print('-----\nDone.')
|
<commit_before><commit_msg>Add script to fix file and wiki migration bug<commit_after>"""Fixes nodes with two copies of the files and wiki addons attached.
This script must be run from the OSF root directory for the imports to work.
"""
from pymongo import MongoClient
from website.app import init_app
from website.project.model import Node
from website.addons.wiki.model import AddonWikiNodeSettings
from website.addons.osffiles.model import AddonFilesNodeSettings
app = init_app()
from website.settings import DB_USER, DB_PASS
client = MongoClient('localhost', 20771)
client.osf20130903.authenticate(DB_USER, DB_PASS)
db = client.osf20130903
for addon_class in (AddonWikiNodeSettings, AddonFilesNodeSettings):
print('Processing ' + addon_class.__name__)
query = db['node'].find({
'.'.join(
('__backrefs',
'addons',
addon_class.__name__.lower(),
'owner'
)
): {'$size': 2}
})
for node in (Node.load(node['_id']) for node in query):
print('- ' + node._id)
keep, discard = [x for x in node.addons if isinstance(x, addon_class)]
addon_class.remove_one(discard)
print('')
print('-----\nDone.')
|
|
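The keep/discard unpacking above assumes exactly two copies per node and would raise a ValueError otherwise. A slightly more defensive sketch keeps the first instance and removes every extra, whatever the count; remove_one below is a stand-in for the real addon_class.remove_one call:
def dedupe(addons, remove_one):
    keep, *extras = addons  # still raises if the list is empty
    for extra in extras:
        remove_one(extra)
    return keep
removed = []
print(dedupe(['wiki-a', 'wiki-b', 'wiki-c'], removed.append))  # wiki-a
print(removed)  # ['wiki-b', 'wiki-c']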
2c6cb6b869726fbac859b5e3e1ab3d3a76d4908b
|
tools/telemetry/telemetry/page/page_test_results.py
|
tools/telemetry/telemetry/page/page_test_results.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes += test
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
self.skipped = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes.append(test)
def addSkip(self, test, reason): # Python 2.7 has this in unittest.TestResult
self.skipped.append((test, reason))
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
|
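The point of the backport in the new file above: before Python 2.7, unittest.TestResult shipped no addSkip(), so AddSkip() would raise AttributeError without the shim. A minimal, self-contained illustration of the shim pattern (purely historical on modern Python, which provides addSkip itself):
class Result:
    def __init__(self):
        self.skipped = []
    def addSkip(self, test, reason):
        # Mirrors what unittest.TestResult provides from 2.7 onward.
        self.skipped.append((test, reason))
r = Result()
r.addSkip('http://example.com', 'page not supported')
print(r.skipped)  # [('http://example.com', 'page not supported')]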
Add skipped and addSkip() to PageTestResults, for Python < 2.7.
|
[telemetry] Add skipped and addSkip() to PageTestResults, for Python < 2.7.
Fixing bots after https://chromiumcodereview.appspot.com/15153003/
Also fix typo in addSuccess(). successes is only used by record_wpr, so that mistake had no effect on the bots.
TBR=tonyg@chromium.org
BUG=None.
TEST=None.
Review URL: https://codereview.chromium.org/15237002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@200641 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
ondra-novak/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,Just-D/chromium-1,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,Chilledheart/chromium,patrickm/chromium.src,markYoungH/chromium.src,anirudhSK/chromium,dednal/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,dushu1203/chromium.src,mogoweb/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,hujiajie/pa-chromium,dednal/chromium.src,hujiajie/pa-chromium,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,M4sse/chromium.src,patrickm/chromium.src,ondra-novak/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,anirudhSK/chromium,ondra-novak/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,ltilve/chromium,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,patrickm/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,Jonekee/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,bright-
sparks/chromium-spacewalk,dushu1203/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,dednal/chromium.src,littlstar/chromium.src,M4sse/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,hujiajie/pa-chromium,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,anirudhSK/chromium,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,jaruba/chromium.src,littlstar/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,hujiajie/pa-chromium,axinging/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,dednal/chromium.src,dednal/chromium.src,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,patrickm/chromium.src,ltilve/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,axinging/chromium-crosswalk,Jonekee/c
hromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,hujiajie/pa-chromium,Chilledheart/chromium,bright-sparks/chromium-spacewalk,M4sse/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,anirudhSK/chromium,Just-D/chromium-1,Just-D/chromium-1,hujiajie/pa-chromium,pozdnyakov/chromium-crosswalk
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes += test
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
[telemetry] Add skipped and addSkip() to PageTestResults, for Python < 2.7.
Fixing bots after https://chromiumcodereview.appspot.com/15153003/
Also fix typo in addSuccess(). successes is only used by record_wpr, so that mistake had no effect on the bots.
TBR=tonyg@chromium.org
BUG=None.
TEST=None.
Review URL: https://codereview.chromium.org/15237002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@200641 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
self.skipped = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes.append(test)
def addSkip(self, test, reason): # Python 2.7 has this in unittest.TestResult
self.skipped.append((test, reason))
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
|
<commit_before># Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes += test
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
<commit_msg>[telemetry] Add skipped and addSkip() to PageTestResults, for Python < 2.7.
Fixing bots after https://chromiumcodereview.appspot.com/15153003/
Also fix typo in addSuccess(). successes is only used by record_wpr, so that mistake had no effect on the bots.
TBR=tonyg@chromium.org
BUG=None.
TEST=None.
Review URL: https://codereview.chromium.org/15237002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@200641 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
self.skipped = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes.append(test)
def addSkip(self, test, reason): # Python 2.7 has this in unittest.TestResult
self.skipped.append((test, reason))
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes += test
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
[telemetry] Add skipped and addSkip() to PageTestResults, for Python < 2.7.
Fixing bots after https://chromiumcodereview.appspot.com/15153003/
Also fix typo in addSuccess(). successes is only used by record_wpr, so that mistake had no effect on the bots.
TBR=tonyg@chromium.org
BUG=None.
TEST=None.
Review URL: https://codereview.chromium.org/15237002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@200641 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
self.skipped = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes.append(test)
def addSkip(self, test, reason): # Python 2.7 has this in unittest.TestResult
self.skipped.append((test, reason))
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
|
<commit_before># Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes += test
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
<commit_msg>[telemetry] Add skipped and addSkip() to PageTestResults, for Python < 2.7.
Fixing bots after https://chromiumcodereview.appspot.com/15153003/
Also fix typo in addSuccess(). successes is only used by record_wpr, so that mistake had no effect on the bots.
TBR=tonyg@chromium.org
BUG=None.
TEST=None.
Review URL: https://codereview.chromium.org/15237002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@200641 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import traceback
import unittest
class PageTestResults(unittest.TestResult):
def __init__(self):
super(PageTestResults, self).__init__()
self.successes = []
self.skipped = []
def addError(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addError(test, err)
else:
self.errors.append((test, ''.join(traceback.format_exception(*err))))
def addFailure(self, test, err):
if isinstance(test, unittest.TestCase):
super(PageTestResults, self).addFailure(test, err)
else:
self.failures.append((test, ''.join(traceback.format_exception(*err))))
def addSuccess(self, test):
self.successes.append(test)
def addSkip(self, test, reason): # Python 2.7 has this in unittest.TestResult
self.skipped.append((test, reason))
def AddError(self, page, err):
self.addError(page.url, err)
def AddFailure(self, page, err):
self.addFailure(page.url, err)
def AddSuccess(self, page):
self.addSuccess(page.url)
def AddSkip(self, page, reason):
self.addSkip(page.url, reason)
def AddFailureMessage(self, page, message):
self.failures.append((page.url, message))
def AddErrorMessage(self, page, message):
self.errors.append((page.url, message))
|
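The addSuccess() typo called out in this record is a classic Python list pitfall: `+=` on a list extends it with the right-hand iterable, so a URL string gets splattered into individual characters, while `.append()` stores it as a single entry. A minimal sketch, independent of the Telemetry code above:

successes = []
successes += "http://a.test"        # list += iterates the string
assert successes == list("http://a.test")

successes = []
successes.append("http://a.test")   # one page URL, one list entry
assert successes == ["http://a.test"]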
dd617bf4ef79d61caa3080331733d05c46b6aa04
|
python/aspk/super_test.py
|
python/aspk/super_test.py
|
# this file demonstrates the diamond problem. The constructor method can't be called correctly in this case.
# When instantiating a D object, super(D, self) will be a B object, so B.__init__ will be called.
# But in B.__init__, super(B, self) will be a C object, instead of an A object, so C.__init__ will be called; this will lead to a parameter mismatch problem, because C.__init__ requires two parameters, but only one is passed in.
class A(object):
def __init__(self, a):
self.a = a
print('A.__init__. a: %s' % a)
class B(A):
def __init__(self, a, b):
super(B, self).__init__(a)
self.b = b
print('B.__init__. b: %s' % b)
class C(A):
def __init__(self, a, c):
super(C, self).__init__(a)
self.c = c
print('C.__init__. c: %s' % c)
class D(B, C):
def __init__(self, a, b, c, d):
# install IPython by 'pip3 install IPython'
import IPython
# IPython.embed()
super(D, self).__init__(a, b)
self.d = d
print('D.__init__. d: %s' % d)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
# parser.add_argument('crpfile', help='')
args = parser.parse_args()
b = B(1, 2)
c = C(1, 3)
d = D(1, 2, 3, 4)
|
Test the diamond inheritance problem.
|
Test the diamond inheritance problem.
|
Python
|
mit
|
astropeak/aspk-code-base,astropeak/aspk-code-base,astropeak/aspk-code-base,astropeak/aspk-code-base,astropeak/aspk-code-base,astropeak/aspk-code-base,astropeak/aspk-code-base,astropeak/aspk-code-base,astropeak/aspk-code-base
|
Test the diamond inheritance problem.
|
# this file demonstrates the diamond problem. The constructor method can't be called correctly in this case.
# When instantiating a D object, super(D, self) will be a B object, so B.__init__ will be called.
# But in B.__init__, super(B, self) will be a C object, instead of an A object, so C.__init__ will be called; this will lead to a parameter mismatch problem, because C.__init__ requires two parameters, but only one is passed in.
class A(object):
def __init__(self, a):
self.a = a
print('A.__init__. a: %s' % a)
class B(A):
def __init__(self, a, b):
super(B, self).__init__(a)
self.b = b
print('B.__init__. b: %s' % b)
class C(A):
def __init__(self, a, c):
super(C, self).__init__(a)
self.c = c
print('C.__init__. c: %s' % c)
class D(B, C):
def __init__(self, a, b, c, d):
# install IPython by 'pip3 install IPython'
import IPython
# IPython.embed()
super(D, self).__init__(a, b)
self.d = d
print('D.__init__. d: %s' % d)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
# parser.add_argument('crpfile', help='')
args = parser.parse_args()
b = B(1, 2)
c = C(1, 3)
d = D(1, 2, 3, 4)
|
<commit_before><commit_msg>Test the diamond inheritance problem.<commit_after>
|
# this file demonstrates the diamond problem. The constructor method can't be called correctly in this case.
# When instantiating a D object, super(D, self) will be a B object, so B.__init__ will be called.
# But in B.__init__, super(B, self) will be a C object, instead of an A object, so C.__init__ will be called; this will lead to a parameter mismatch problem, because C.__init__ requires two parameters, but only one is passed in.
class A(object):
def __init__(self, a):
self.a = a
print('A.__init__. a: %s' % a)
class B(A):
def __init__(self, a, b):
super(B, self).__init__(a)
self.b = b
print('B.__init__. b: %s' % b)
class C(A):
def __init__(self, a, c):
super(C, self).__init__(a)
self.c = c
print('C.__init__. c: %s' % c)
class D(B, C):
def __init__(self, a, b, c, d):
# install IPython by 'pip3 install IPython'
import IPython
# IPython.embed()
super(D, self).__init__(a, b)
self.d = d
print('D.__init__. d: %s' % d)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
# parser.add_argument('crpfile', help='')
args = parser.parse_args()
b = B(1, 2)
c = C(1, 3)
d = D(1, 2, 3, 4)
|
Test the diamond inheritance problem.# this file demonstrates the diamond problem. The constructor method can't be called correctly in this case.
# When instantiating a D object, super(D, self) will be a B object, so B.__init__ will be called.
# But in B.__init__, super(B, self) will be a C object, instead of an A object, so C.__init__ will be called; this will lead to a parameter mismatch problem, because C.__init__ requires two parameters, but only one is passed in.
class A(object):
def __init__(self, a):
self.a = a
print('A.__init__. a: %s' % a)
class B(A):
def __init__(self, a, b):
super(B, self).__init__(a)
self.b = b
print('B.__init__. b: %s' % b)
class C(A):
def __init__(self, a, c):
super(C, self).__init__(a)
self.c = c
print('C.__init__. c: %s' % c)
class D(B, C):
def __init__(self, a, b, c, d):
# install IPython by 'pip3 install IPython'
import IPython
# IPython.embed()
super(D, self).__init__(a, b)
self.d = d
print('D.__init__. d: %s' % d)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
# parser.add_argument('crpfile', help='')
args = parser.parse_args()
b = B(1, 2)
c = C(1, 3)
d = D(1, 2, 3, 4)
|
<commit_before><commit_msg>Test the diamond inheritance problem.<commit_after># this file demonstrates the diamond problem. The constructor method can't be called correctly in this case.
# When instantiating a D object, super(D, self) will be a B object, so B.__init__ will be called.
# But in B.__init__, super(B, self) will be a C object, instead of an A object, so C.__init__ will be called; this will lead to a parameter mismatch problem, because C.__init__ requires two parameters, but only one is passed in.
class A(object):
def __init__(self, a):
self.a = a
print('A.__init__. a: %s' % a)
class B(A):
def __init__(self, a, b):
super(B, self).__init__(a)
self.b = b
print('B.__init__. b: %s' % b)
class C(A):
def __init__(self, a, c):
super(C, self).__init__(a)
self.c = c
print('C.__init__. c: %s' % c)
class D(B, C):
def __init__(self, a, b, c, d):
# install IPython by 'pip3 install IPython'
import IPython
# IPython.embed()
super(D, self).__init__(a, b)
self.d = d
print('D.__init__. d: %s' % d)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
# parser.add_argument('crpfile', help='')
args = parser.parse_args()
b = B(1, 2)
c = C(1, 3)
d = D(1, 2, 3, 4)
|
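The mismatch described in the comments falls straight out of Python's method resolution order: for class D(B, C) the MRO is D -> B -> C -> A -> object, so super(B, self) delegates to C rather than A. The conventional workaround is cooperative inheritance, where every __init__ consumes its own keyword argument and forwards the rest via **kwargs. The sketch below illustrates that pattern; it is a fix for the problem the file demonstrates, not part of the committed code:

class A(object):
    def __init__(self, a, **kwargs):
        super(A, self).__init__(**kwargs)  # keep delegating along the MRO
        self.a = a

class B(A):
    def __init__(self, b, **kwargs):
        super(B, self).__init__(**kwargs)
        self.b = b

class C(A):
    def __init__(self, c, **kwargs):
        super(C, self).__init__(**kwargs)
        self.c = c

class D(B, C):
    def __init__(self, d, **kwargs):
        super(D, self).__init__(**kwargs)
        self.d = d

print([cls.__name__ for cls in D.__mro__])  # ['D', 'B', 'C', 'A', 'object']
d = D(a=1, b=2, c=3, d=4)  # every __init__ now runs exactly once
assert (d.a, d.b, d.c, d.d) == (1, 2, 3, 4)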
|
0ff96767ea737a1771bec6ac912c2e5dec6c3328
|
modules/tools/extractor/extractor.py
|
modules/tools/extractor/extractor.py
|
#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import rospy
from std_msgs.msg import String
from modules.planning.proto.planning_pb2 import ADCTrajectory
from modules.routing.proto.routing_pb2 import RoutingResponse
class Extractor(object):
def __init__(self):
self.routing = rospy.Publisher(
'/apollo/routing_response', RoutingResponse, queue_size=1)
def callback_planning(self, data):
self.routing.publish(data.debug.planning_data.routing)
print "New Planning"
def main():
"""
Main function
"""
extract = Extractor()
rospy.init_node('extract_routing', anonymous=True)
planning_sub = rospy.Subscriber(
'/apollo/planning',
ADCTrajectory,
extract.callback_planning,
queue_size=1)
rospy.spin()
if __name__ == '__main__':
main()
|
Add tool to extract routing from planning debug
|
Add tool to extract routing from planning debug
|
Python
|
apache-2.0
|
msbeta/apollo,msbeta/apollo,msbeta/apollo,msbeta/apollo,msbeta/apollo,msbeta/apollo
|
Add tool to extract routing from planning debug
|
#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import rospy
from std_msgs.msg import String
from modules.planning.proto.planning_pb2 import ADCTrajectory
from modules.routing.proto.routing_pb2 import RoutingResponse
class Extractor(object):
def __init__(self):
self.routing = rospy.Publisher(
'/apollo/routing_response', RoutingResponse, queue_size=1)
def callback_planning(self, data):
self.routing.publish(data.debug.planning_data.routing)
print "New Planning"
def main():
"""
Main function
"""
extract = Extractor()
rospy.init_node('extract_routing', anonymous=True)
planning_sub = rospy.Subscriber(
'/apollo/planning',
ADCTrajectory,
extract.callback_planning,
queue_size=1)
rospy.spin()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tool to extract routing from planning debug<commit_after>
|
#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import rospy
from std_msgs.msg import String
from modules.planning.proto.planning_pb2 import ADCTrajectory
from modules.routing.proto.routing_pb2 import RoutingResponse
class Extractor(object):
def __init__(self):
self.routing = rospy.Publisher(
'/apollo/routing_response', RoutingResponse, queue_size=1)
def callback_planning(self, data):
self.routing.publish(data.debug.planning_data.routing)
print "New Planning"
def main():
"""
Main function
"""
extract = Extractor()
rospy.init_node('extract_routing', anonymous=True)
planning_sub = rospy.Subscriber(
'/apollo/planning',
ADCTrajectory,
extract.callback_planning,
queue_size=1)
rospy.spin()
if __name__ == '__main__':
main()
|
Add tool to extract routing from planning debug#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import rospy
from std_msgs.msg import String
from modules.planning.proto.planning_pb2 import ADCTrajectory
from modules.routing.proto.routing_pb2 import RoutingResponse
class Extractor(object):
def __init__(self):
self.routing = rospy.Publisher(
'/apollo/routing_response', RoutingResponse, queue_size=1)
def callback_planning(self, data):
self.routing.publish(data.debug.planning_data.routing)
print "New Planning"
def main():
"""
Main function
"""
extract = Extractor()
rospy.init_node('extract_routing', anonymous=True)
planning_sub = rospy.Subscriber(
'/apollo/planning',
ADCTrajectory,
extract.callback_planning,
queue_size=1)
rospy.spin()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tool to extract routing from planning debug<commit_after>#!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import rospy
from std_msgs.msg import String
from modules.planning.proto.planning_pb2 import ADCTrajectory
from modules.routing.proto.routing_pb2 import RoutingResponse
class Extractor(object):
def __init__(self):
self.routing = rospy.Publisher(
'/apollo/routing_response', RoutingResponse, queue_size=1)
def callback_planning(self, data):
self.routing.publish(data.debug.planning_data.routing)
print "New Planning"
def main():
"""
Main function
"""
extract = Extractor()
rospy.init_node('extract_routing', anonymous=True)
planning_sub = rospy.Subscriber(
'/apollo/planning',
ADCTrajectory,
extract.callback_planning,
queue_size=1)
rospy.spin()
if __name__ == '__main__':
main()
|
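Because callback_planning only reads data.debug.planning_data.routing and hands it to a publisher, the republishing step can be checked without a ROS master or the generated proto classes. A rough sketch with unittest.mock (Python 3; the attribute path is taken from the code above, everything else is a stand-in):

from unittest.mock import MagicMock

publisher = MagicMock()   # stands in for the rospy.Publisher
data = MagicMock()        # stands in for an ADCTrajectory message
data.debug.planning_data.routing = "stub-routing"

# The callback's one real action, replayed against the stubs.
publisher.publish(data.debug.planning_data.routing)
publisher.publish.assert_called_once_with("stub-routing")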
|
4c359a1bab5782614d95c7165c6af8805fe85d99
|
plot.py
|
plot.py
|
#!/usr/bin/env python
"""
This script generates the relevant figures and/or tables from the experiment. To
reprocess the data, run `process.py`.
"""
import os
from Modules.plotting import *
from pxl.styleplot import set_sns
save = True
savetype = ".pdf"
show = True
if __name__ == "__main__":
# Set plot style using PXL's Seaborn wrapper
set_sns()
# Call plotting functions here
if show:
plt.show()
|
Add top-level script for generating figures
|
Add top-level script for generating figures
|
Python
|
mit
|
iangagnon/2015-09-24-turbine-test-bed-demo,UNH-CORE/2015-09-24-turbine-test-bed-demo,petebachant/TurbineDAQ-project-template
|
Add top-level script for generating figures
|
#!/usr/bin/env python
"""
This script generates the relevant figures and/or tables from the experiment. To
reprocess the data, run `process.py`.
"""
import os
from Modules.plotting import *
from pxl.styleplot import set_sns
save = True
savetype = ".pdf"
show = True
if __name__ == "__main__":
# Set plot style using PXL's Seaborn wrapper
set_sns()
# Call plotting functions here
if show:
plt.show()
|
<commit_before><commit_msg>Add top-level script for generating figures<commit_after>
|
#!/usr/bin/env python
"""
This script generates the relevant figures and/or tables from the experiment. To
reprocess the data, run `process.py`.
"""
import os
from Modules.plotting import *
from pxl.styleplot import set_sns
save = True
savetype = ".pdf"
show = True
if __name__ == "__main__":
# Set plot style using PXL's Seaborn wrapper
set_sns()
# Call plotting functions here
if show:
plt.show()
|
Add top-level script for generating figures#!/usr/bin/env python
"""
This script generates the relevant figures and/or tables from the experiment. To
reprocess the data, run `process.py`.
"""
import os
from Modules.plotting import *
from pxl.styleplot import set_sns
save = True
savetype = ".pdf"
show = True
if __name__ == "__main__":
# Set plot style using PXL's Seaborn wrapper
set_sns()
# Call plotting functions here
if show:
plt.show()
|
<commit_before><commit_msg>Add top-level script for generating figures<commit_after>#!/usr/bin/env python
"""
This script generates the relevant figures and/or tables from the experiment. To
reprocess the data, run `process.py`.
"""
import os
from Modules.plotting import *
from pxl.styleplot import set_sns
save = True
savetype = ".pdf"
show = True
if __name__ == "__main__":
# Set plot style using PXL's Seaborn wrapper
set_sns()
# Call plotting functions here
if show:
plt.show()
|
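The scaffold above deliberately leaves "# Call plotting functions here" empty. A hypothetical plotting function wired into the save/savetype/show flags might look like the sketch below; the function name, data, and output path are illustrative, not taken from the repository:

import numpy as np
import matplotlib.pyplot as plt

def plot_example_curve(save=False, savetype=".pdf", show=False):
    # Stand-in for a function that would live in Modules.plotting.
    x = np.linspace(0, 1, 50)
    fig, ax = plt.subplots()
    ax.plot(x, x**2, label="example")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.legend()
    if save:
        fig.savefig("example" + savetype)
    if show:
        plt.show()

plot_example_curve(save=False, show=False)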
|
23255192812a687e9fa56828908828d5c4caddc6
|
DBConnect.py
|
DBConnect.py
|
import psycopg2 as pg_driver
class DBService:
def __init__(self, user, password, host, dbname):
self.db = pg_driver.connect(user=user, password=password, host=host, dbname=dbname)
self.cur = self.db.cursor()
def __del__(self):
self.db.close()
def get_url(self):
self.cur.execute('SELECT url FROM urls limit 1')
result = self.cur.fetchone()
if result is not None:
self.cur.execute('DELETE FROM urls WHERE url=\'{0}\''.format(result[0]))
self.db.commit()
return result
def is_empty(self):
self.cur.execute('SELECT COUNT(*) FROM urls')
result = self.cur.fetchone()
return result[0] == 0
def add_url(self, urls):
for url in urls:
try:
cmd = 'INSERT INTO urls(url) VALUES (\'{0}\')'.format(url)
self.cur.execute(cmd)
self.db.commit()
except:
self.db.rollback()
def size(self):
self.cur.execute('SELECT COUNT(*) FROM storage')
result = self.cur.fetchone()
return result[0]
def add_data(self, url, data):
self.cur.execute('SELECT COUNT(*) FROM storage WHERE url=\'{0}\''.format(url))
count = self.cur.fetchone()
if count[0] == 0:
cmd = 'INSERT INTO storage(url, update_count, last_update, bdata) VALUES (\'{0}\', 0, now(), {1})'\
.format(url, pg_driver.Binary(data.encode()))
self.cur.execute(cmd)
self.db.commit()
else:
cmd = 'UPDATE storage SET update_count=update_count+1, last_update=now(), bdata={0} WHERE url=\'{1}\''\
.format(pg_driver.Binary(data.encode()), url)
self.cur.execute(cmd)
self.db.commit()
|
Implement class for connecting to DB
|
Implement class for connecting to DB
|
Python
|
mit
|
YorovSobir/informational_retrieval
|
Implement class for connecting to DB
|
import psycopg2 as pg_driver
class DBService:
def __init__(self, user, password, host, dbname):
self.db = pg_driver.connect(user=user, password=password, host=host, dbname=dbname)
self.cur = self.db.cursor()
def __del__(self):
self.db.close()
def get_url(self):
self.cur.execute('SELECT url FROM urls limit 1')
result = self.cur.fetchone()
if result is not None:
self.cur.execute('DELETE FROM urls WHERE url=\'{0}\''.format(result[0]))
self.db.commit()
return result
def is_empty(self):
self.cur.execute('SELECT COUNT(*) FROM urls')
result = self.cur.fetchone()
return result[0] == 0
def add_url(self, urls):
for url in urls:
try:
cmd = 'INSERT INTO urls(url) VALUES (\'{0}\')'.format(url)
self.cur.execute(cmd)
self.db.commit()
except:
self.db.rollback()
def size(self):
self.cur.execute('SELECT COUNT(*) FROM storage')
result = self.cur.fetchone()
return result[0]
def add_data(self, url, data):
self.cur.execute('SELECT COUNT(*) FROM storage WHERE url=\'{0}\''.format(url))
count = self.cur.fetchone()
if count[0] == 0:
cmd = 'INSERT INTO storage(url, update_count, last_update, bdata) VALUES (\'{0}\', 0, now(), {1})'\
.format(url, pg_driver.Binary(data.encode()))
self.cur.execute(cmd)
self.db.commit()
else:
cmd = 'UPDATE storage SET update_count=update_count+1, last_update=now(), bdata={0} WHERE url=\'{1}\''\
.format(pg_driver.Binary(data.encode()), url)
self.cur.execute(cmd)
self.db.commit()
|
<commit_before><commit_msg>Implement class for connecting to DB<commit_after>
|
import psycopg2 as pg_driver
class DBService:
def __init__(self, user, password, host, dbname):
self.db = pg_driver.connect(user=user, password=password, host=host, dbname=dbname)
self.cur = self.db.cursor()
def __del__(self):
self.db.close()
def get_url(self):
self.cur.execute('SELECT url FROM urls limit 1')
result = self.cur.fetchone()
if result is not None:
self.cur.execute('DELETE FROM urls WHERE url=\'{0}\''.format(result[0]))
self.db.commit()
return result
def is_empty(self):
self.cur.execute('SELECT COUNT(*) FROM urls')
result = self.cur.fetchone()
return result[0] == 0
def add_url(self, urls):
for url in urls:
try:
cmd = 'INSERT INTO urls(url) VALUES (\'{0}\')'.format(url)
self.cur.execute(cmd)
self.db.commit()
except:
self.db.rollback()
def size(self):
self.cur.execute('SELECT COUNT(*) FROM storage')
result = self.cur.fetchone()
return result[0]
def add_data(self, url, data):
self.cur.execute('SELECT COUNT(*) FROM storage WHERE url=\'{0}\''.format(url))
count = self.cur.fetchone()
if count[0] == 0:
cmd = 'INSERT INTO storage(url, update_count, last_update, bdata) VALUES (\'{0}\', 0, now(), {1})'\
.format(url, pg_driver.Binary(data.encode()))
self.cur.execute(cmd)
self.db.commit()
else:
cmd = 'UPDATE storage SET update_count=update_count+1, last_update=now(), bdata={0} WHERE url=\'{1}\''\
.format(pg_driver.Binary(data.encode()), url)
self.cur.execute(cmd)
self.db.commit()
|
Implement class for connecting to DBimport psycopg2 as pg_driver
class DBService:
def __init__(self, user, password, host, dbname):
self.db = pg_driver.connect(user=user, password=password, host=host, dbname=dbname)
self.cur = self.db.cursor()
def __del__(self):
self.db.close()
def get_url(self):
self.cur.execute('SELECT url FROM urls limit 1')
result = self.cur.fetchone()
if result is not None:
self.cur.execute('DELETE FROM urls WHERE url=\'{0}\''.format(result[0]))
self.db.commit()
return result
def is_empty(self):
self.cur.execute('SELECT COUNT(*) FROM urls')
result = self.cur.fetchone()
return result[0] == 0
def add_url(self, urls):
for url in urls:
try:
cmd = 'INSERT INTO urls(url) VALUES (\'{0}\')'.format(url)
self.cur.execute(cmd)
self.db.commit()
except:
self.db.rollback()
def size(self):
self.cur.execute('SELECT COUNT(*) FROM storage')
result = self.cur.fetchone()
return result[0]
def add_data(self, url, data):
self.cur.execute('SELECT COUNT(*) FROM storage WHERE url=\'{0}\''.format(url))
count = self.cur.fetchone()
if count[0] == 0:
cmd = 'INSERT INTO storage(url, update_count, last_update, bdata) VALUES (\'{0}\', 0, now(), {1})'\
.format(url, pg_driver.Binary(data.encode()))
self.cur.execute(cmd)
self.db.commit()
else:
cmd = 'UPDATE storage SET update_count=update_count+1, last_update=now(), bdata={0} WHERE url=\'{1}\''\
.format(pg_driver.Binary(data.encode()), url)
self.cur.execute(cmd)
self.db.commit()
|
<commit_before><commit_msg>Implement class for connecting to DB<commit_after>import psycopg2 as pg_driver
class DBService:
def __init__(self, user, password, host, dbname):
self.db = pg_driver.connect(user=user, password=password, host=host, dbname=dbname)
self.cur = self.db.cursor()
def __del__(self):
self.db.close()
def get_url(self):
self.cur.execute('SELECT url FROM urls limit 1')
result = self.cur.fetchone()
if result is not None:
self.cur.execute('DELETE FROM urls WHERE url=\'{0}\''.format(result[0]))
self.db.commit()
return result
def is_empty(self):
self.cur.execute('SELECT COUNT(*) FROM urls')
result = self.cur.fetchone()
return result[0] == 0
def add_url(self, urls):
for url in urls:
try:
cmd = 'INSERT INTO urls(url) VALUES (\'{0}\')'.format(url)
self.cur.execute(cmd)
self.db.commit()
except:
self.db.rollback()
def size(self):
self.cur.execute('SELECT COUNT(*) FROM storage')
result = self.cur.fetchone()
return result[0]
def add_data(self, url, data):
self.cur.execute('SELECT COUNT(*) FROM storage WHERE url=\'{0}\''.format(url))
count = self.cur.fetchone()
if count[0] == 0:
cmd = 'INSERT INTO storage(url, update_count, last_update, bdata) VALUES (\'{0}\', 0, now(), {1})'\
.format(url, pg_driver.Binary(data.encode()))
self.cur.execute(cmd)
self.db.commit()
else:
cmd = 'UPDATE storage SET update_count=update_count+1, last_update=now(), bdata={0} WHERE url=\'{1}\''\
.format(pg_driver.Binary(data.encode()), url)
self.cur.execute(cmd)
self.db.commit()
|
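The class above splices values into SQL with str.format, which breaks on any URL containing a single quote and is open to SQL injection; it also swallows every error with a bare except. psycopg2 accepts query parameters separately, letting the driver do the quoting. A hardened sketch of the same operations (same table layout assumed; not a drop-in from the original commit):

import psycopg2 as pg_driver

def add_urls_safe(cur, db, urls):
    for url in urls:
        try:
            # %s placeholders: the driver escapes the value itself.
            cur.execute('INSERT INTO urls(url) VALUES (%s)', (url,))
            db.commit()
        except pg_driver.Error:  # narrower than a bare except
            db.rollback()

def add_data_safe(cur, db, url, data):
    cur.execute('SELECT COUNT(*) FROM storage WHERE url = %s', (url,))
    if cur.fetchone()[0] == 0:
        cur.execute(
            'INSERT INTO storage(url, update_count, last_update, bdata) '
            'VALUES (%s, 0, now(), %s)',
            (url, pg_driver.Binary(data.encode())))
    else:
        cur.execute(
            'UPDATE storage SET update_count = update_count + 1, '
            'last_update = now(), bdata = %s WHERE url = %s',
            (pg_driver.Binary(data.encode()), url))
    db.commit()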
|
9cb3ab8ff55cfdf1413f1290bdc40d2e90fe1e02
|
tests/test_response.py
|
tests/test_response.py
|
"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
Add basic unittests for _Response initialization
|
Add basic unittests for _Response initialization
|
Python
|
apache-2.0
|
treethought/flask-assistant
|
Add basic unittests for _Response initialization
|
"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
<commit_before><commit_msg>Add basic unittests for _Response initialization<commit_after>
|
"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
Add basic unittests for _Response initialization"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
<commit_before><commit_msg>Add basic unittests for _Response initialization<commit_after>"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
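The three tests differ only in the input string and the expected speech value, so if pytest is the runner (the record does not say, so this is an assumption) they could be collapsed with parametrize:

import pytest
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response

@pytest.mark.parametrize('speech', ['foobar', None, 'foo & bar'])
@patch('flask_assistant.response.current_app')
def test_response_speech(mock, speech):
    # Same setup as the original tests: rebind the patched app to an Assistant.
    mock = Assistant(Flask(__name__))
    assert _Response(speech)._response['speech'] == speech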
|
6ae99e2d05ba43e09f500ddad9d8cc491c7263f7
|
static_rnn.py
|
static_rnn.py
|
# Static unrolling through time
import tensorflow as tf
import numpy as np
tf.set_random_seed(765)
np.random.seed(765)
n_inputs = 3
n_neurons = 5
X0 = tf.placeholder(tf.float32, [None, n_inputs])
X1 = tf.placeholder(tf.float32, [None, n_inputs])
basic_cell = tf.contrib.rnn.BasicRNNCell(num_units=n_neurons)
output_seqs, states = tf.contrib.rnn.static_rnn(basic_cell, [X0, X1],
dtype=tf.float32)
Y0, Y1 = output_seqs
|
Add code for simple static RNN
|
Add code for simple static RNN
Hard to scale up this example.
|
Python
|
mit
|
KT12/hands_on_machine_learning
|
Add code for simple static RNN
Hard to scale up this example.
|
# Static unrolling through time
import tensorflow as tf
import numpy as np
tf.set_random_seed(765)
np.random.seed(765)
n_inputs = 3
n_neurons = 5
X0 = tf.placeholder(tf.float32, [None, n_inputs])
X1 = tf.placeholder(tf.float32, [None, n_inputs])
basic_cell = tf.contrib.rnn.BasicRNNCell(num_units=n_neurons)
output_seqs, states = tf.contrib.rnn.static_rnn(basic_cell, [X0, X1],
dtype=tf.float32)
Y0, Y1 = output_seqs
|
<commit_before><commit_msg>Add code for simple static RNN
Hard to scale up this example.<commit_after>
|
# Static unrolling through time
import tensorflow as tf
import numpy as np
tf.set_random_seed(765)
np.random.seed(765)
n_inputs = 3
n_neurons = 5
X0 = tf.placeholder(tf.float32, [None, n_inputs])
X1 = tf.placeholder(tf.float32, [None, n_inputs])
basic_cell = tf.contrib.rnn.BasicRNNCell(num_units=n_neurons)
output_seqs, states = tf.contrib.rnn.static_rnn(basic_cell, [X0, X1],
dtype=tf.float32)
Y0, Y1 = output_seqs
|
Add code for simple static RNN
Hard to scale up this example.# Static unrolling through time
import tensorflow as tf
import numpy as np
tf.set_random_seed(765)
np.random.seed(765)
n_inputs = 3
n_neurons = 5
X0 = tf.placeholder(tf.float32, [None, n_inputs])
X1 = tf.placeholder(tf.float32, [None, n_inputs])
basic_cell = tf.contrib.rnn.BasicRNNCell(num_units=n_neurons)
output_seqs, states = tf.contrib.rnn.static_rnn(basic_cell, [X0, X1],
dtype=tf.float32)
Y0, Y1 = output_seqs
|
<commit_before><commit_msg>Add code for simple static RNN
Hard to scale up this example.<commit_after># Static unrolling through time
import tensorflow as tf
import numpy as np
tf.set_random_seed(765)
np.random.seed(765)
n_inputs = 3
n_neurons = 5
X0 = tf.placeholder(tf.float32, [None, n_inputs])
X1 = tf.placeholder(tf.float32, [None, n_inputs])
basic_cell = tf.contrib.rnn.BasicRNNCell(num_units=n_neurons)
output_seqs, states = tf.contrib.rnn.static_rnn(basic_cell, [X0, X1],
dtype=tf.float32)
Y0, Y1 = output_seqs
|
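What makes the static form hard to scale is that every time step needs its own placeholder (X0, X1, ...) and the graph must be rebuilt for a new sequence length. The usual remedy in the same TF 1.x API family is tf.nn.dynamic_rnn over a single 3-D tensor; a sketch (not part of the commit):

# Dynamic unrolling: one [batch, n_steps, n_inputs] placeholder replaces the
# per-step placeholders, so n_steps can change without touching the graph code.
import tensorflow as tf
import numpy as np

n_steps, n_inputs, n_neurons = 2, 3, 5
X = tf.placeholder(tf.float32, [None, n_steps, n_inputs])
seq_length = tf.placeholder(tf.int32, [None])  # optional per-example lengths

cell = tf.contrib.rnn.BasicRNNCell(num_units=n_neurons)
outputs, states = tf.nn.dynamic_rnn(cell, X, dtype=tf.float32,
                                    sequence_length=seq_length)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    batch = np.random.rand(4, n_steps, n_inputs)
    out = sess.run(outputs, feed_dict={X: batch, seq_length: [2, 2, 1, 2]})
    print(out.shape)  # (4, 2, 5): [batch, n_steps, n_neurons]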
|
d6aeb1b4e4a9df6ae1f8b8316a90b1d9e3e7b7f2
|
test.py
|
test.py
|
#!/usr/bin/env python
# built-in
import os
from pprint import pprint
import sys
# 3rd party by pip
# local
from bibpy import bib
def main(argv=sys.argv[:]):
# example = 'bibtex/robust_inside-outside_segmentation_using_generalized_winding_numbers.bib'
example = argv[1]
data = bib.clear_comments(open(example, 'r').read())
parser = bib.Bibparser(data)
parser.parse()
data = parser.json()
#pprint(data)
pprint(parser.records)
return 0
if __name__ == '__main__':
sys.exit(main())
|
Test bibpy. It works really well.
|
Test bibpy. It works really well.
|
Python
|
unlicense
|
drakeguan/siggraphwall,drakeguan/siggraphwall,drakeguan/siggraphwall
|
Test bibpy. It works really well.
|
#!/usr/bin/env python
# built-in
import os
from pprint import pprint
import sys
# 3rd party by pip
# local
from bibpy import bib
def main(argv=sys.argv[:]):
# example = 'bibtex/robust_inside-outside_segmentation_using_generalized_winding_numbers.bib'
example = argv[1]
data = bib.clear_comments(open(example, 'r').read())
parser = bib.Bibparser(data)
parser.parse()
data = parser.json()
#pprint(data)
pprint(parser.records)
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Test bibpy. It works really well.<commit_after>
|
#!/usr/bin/env python
# built-in
import os
from pprint import pprint
import sys
# 3rd party by pip
# local
from bibpy import bib
def main(argv=sys.argv[:]):
# example = 'bibtex/robust_inside-outside_segmentation_using_generalized_winding_numbers.bib'
example = argv[1]
data = bib.clear_comments(open(example, 'r').read())
parser = bib.Bibparser(data)
parser.parse()
data = parser.json()
#pprint(data)
pprint(parser.records)
return 0
if __name__ == '__main__':
sys.exit(main())
|
Test bibpy. It works really well.#!/usr/bin/env python
# built-in
import os
from pprint import pprint
import sys
# 3rd party by pip
# local
from bibpy import bib
def main(argv=sys.argv[:]):
# example = 'bibtex/robust_inside-outside_segmentation_using_generalized_winding_numbers.bib'
example = argv[1]
data = bib.clear_comments(open(example, 'r').read())
parser = bib.Bibparser(data)
parser.parse()
data = parser.json()
#pprint(data)
pprint(parser.records)
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Test bibpy. It works really well.<commit_after>#!/usr/bin/env python
# built-in
import os
from pprint import pprint
import sys
# 3rd party by pip
# local
from bibpy import bib
def main(argv=sys.argv[:]):
# example = 'bibtex/robust_inside-outside_segmentation_using_generalized_winding_numbers.bib'
example = argv[1]
data = bib.clear_comments(open(example, 'r').read())
parser = bib.Bibparser(data)
parser.parse()
data = parser.json()
#pprint(data)
pprint(parser.records)
return 0
if __name__ == '__main__':
sys.exit(main())
|