Dataset schema (column name, feature type, value-length range):

- commit: stringlengths, 40–40
- old_file: stringlengths, 4–118
- new_file: stringlengths, 4–118
- old_contents: stringlengths, 0–2.94k
- new_contents: stringlengths, 1–4.43k
- subject: stringlengths, 15–444
- message: stringlengths, 16–3.45k
- lang: stringclasses, 1 value
- license: stringclasses, 13 values
- repos: stringlengths, 5–43.2k
- prompt: stringlengths, 17–4.58k
- response: stringlengths, 1–4.43k
- prompt_tagged: stringlengths, 58–4.62k
- response_tagged: stringlengths, 1–4.43k
- text: stringlengths, 132–7.29k
- text_tagged: stringlengths, 173–7.33k
13a11e7ac45b1b5d886489bf3a2c3a3bdb608131
|
accelerator/migrations/0106_migrate_startup_top_nav.py
|
accelerator/migrations/0106_migrate_startup_top_nav.py
|
# Generated by Django 2.2.10 on 2021-11-24 16:41
import swapper
from django.db import migrations
def add_startup_profile_top_nav(apps, schema_editor):
NavTreeItem = apps.get_model('accelerator', 'NavTreeItem')
NavTree = swapper.load_model('accelerator', 'NavTree')
nav_items = [
{"title": 'My Startups',
'alias': 'my_startup',
'url': '/mystartups'},
{"title": 'Dashboard',
'alias': 'dashboard',
'urlaspattern': True,
'url': 'startup_dashboard startup.id'},
{"title": 'Company Profile',
'alias': 'company_profile',
'urlaspattern': True,
'url': 'startup_profile startup.id'},
{"title": 'Business Proposition',
'alias': 'business_proposition',
'urlaspattern': True,
'url': 'business_proposition_view startup.id'},
{"title": 'Progress',
'alias': 'progress',
'urlaspattern': True,
'url': 'startup_update_view startup.id'},
{"title": 'Team',
'alias': 'startup_team',
'urlaspattern': True,
'url': 'startup_team_view startup.id'},
]
nav_tree, _ = NavTree.objects.get_or_create(
alias='startup_profile_manager_subnav',
defaults={'title': 'Startup Profile Manager'})
for item in nav_items:
defaults = {
'title': item['title']
}
if item.get('urlaspattern', False):
defaults['urlaspattern'] = item['urlaspattern']
NavTreeItem.objects.get_or_create(
alias=item['alias'],
url=item['url'],
tree_id=nav_tree.id,
defaults=defaults)
class Migration(migrations.Migration):
dependencies = [
('accelerator',
'0105_inclusion_of_business_proposition_model_changes'),
]
operations = [
migrations.RunPython(add_startup_profile_top_nav,
migrations.RunPython.noop)
]
|
Merge remote-tracking branch 'origin' into AC-9512
|
[AC-9512] Merge remote-tracking branch 'origin' into AC-9512
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
[AC-9512] Merge remote-tracking branch 'origin' into AC-9512
|
# Generated by Django 2.2.10 on 2021-11-24 16:41
import swapper
from django.db import migrations
def add_startup_profile_top_nav(apps, schema_editor):
NavTreeItem = apps.get_model('accelerator', 'NavTreeItem')
NavTree = swapper.load_model('accelerator', 'NavTree')
nav_items = [
{"title": 'My Startups',
'alias': 'my_startup',
'url': '/mystartups'},
{"title": 'Dashboard',
'alias': 'dashboard',
'urlaspattern': True,
'url': 'startup_dashboard startup.id'},
{"title": 'Company Profile',
'alias': 'company_profile',
'urlaspattern': True,
'url': 'startup_profile startup.id'},
{"title": 'Business Proposition',
'alias': 'business_proposition',
'urlaspattern': True,
'url': 'business_proposition_view startup.id'},
{"title": 'Progress',
'alias': 'progress',
'urlaspattern': True,
'url': 'startup_update_view startup.id'},
{"title": 'Team',
'alias': 'startup_team',
'urlaspattern': True,
'url': 'startup_team_view startup.id'},
]
nav_tree, _ = NavTree.objects.get_or_create(
alias='startup_profile_manager_subnav',
defaults={'title': 'Startup Profile Manager'})
for item in nav_items:
defaults = {
'title': item['title']
}
if item.get('urlaspattern', False):
defaults['urlaspattern'] = item['urlaspattern']
NavTreeItem.objects.get_or_create(
alias=item['alias'],
url=item['url'],
tree_id=nav_tree.id,
defaults=defaults)
class Migration(migrations.Migration):
dependencies = [
('accelerator',
'0105_inclusion_of_business_proposition_model_changes'),
]
operations = [
migrations.RunPython(add_startup_profile_top_nav,
migrations.RunPython.noop)
]
|
<commit_before><commit_msg>[AC-9512] Merge remote-tracking branch 'origin' into AC-9512<commit_after>
|
# Generated by Django 2.2.10 on 2021-11-24 16:41
import swapper
from django.db import migrations
def add_startup_profile_top_nav(apps, schema_editor):
NavTreeItem = apps.get_model('accelerator', 'NavTreeItem')
NavTree = swapper.load_model('accelerator', 'NavTree')
nav_items = [
{"title": 'My Startups',
'alias': 'my_startup',
'url': '/mystartups'},
{"title": 'Dashboard',
'alias': 'dashboard',
'urlaspattern': True,
'url': 'startup_dashboard startup.id'},
{"title": 'Company Profile',
'alias': 'company_profile',
'urlaspattern': True,
'url': 'startup_profile startup.id'},
{"title": 'Business Proposition',
'alias': 'business_proposition',
'urlaspattern': True,
'url': 'business_proposition_view startup.id'},
{"title": 'Progress',
'alias': 'progress',
'urlaspattern': True,
'url': 'startup_update_view startup.id'},
{"title": 'Team',
'alias': 'startup_team',
'urlaspattern': True,
'url': 'startup_team_view startup.id'},
]
nav_tree, _ = NavTree.objects.get_or_create(
alias='startup_profile_manager_subnav',
defaults={'title': 'Startup Profile Manager'})
for item in nav_items:
defaults = {
'title': item['title']
}
if item.get('urlaspattern', False):
defaults['urlaspattern'] = item['urlaspattern']
NavTreeItem.objects.get_or_create(
alias=item['alias'],
url=item['url'],
tree_id=nav_tree.id,
defaults=defaults)
class Migration(migrations.Migration):
dependencies = [
('accelerator',
'0105_inclusion_of_business_proposition_model_changes'),
]
operations = [
migrations.RunPython(add_startup_profile_top_nav,
migrations.RunPython.noop)
]
|
[AC-9512] Merge remote-tracking branch 'origin' into AC-9512# Generated by Django 2.2.10 on 2021-11-24 16:41
import swapper
from django.db import migrations
def add_startup_profile_top_nav(apps, schema_editor):
NavTreeItem = apps.get_model('accelerator', 'NavTreeItem')
NavTree = swapper.load_model('accelerator', 'NavTree')
nav_items = [
{"title": 'My Startups',
'alias': 'my_startup',
'url': '/mystartups'},
{"title": 'Dashboard',
'alias': 'dashboard',
'urlaspattern': True,
'url': 'startup_dashboard startup.id'},
{"title": 'Company Profile',
'alias': 'company_profile',
'urlaspattern': True,
'url': 'startup_profile startup.id'},
{"title": 'Business Proposition',
'alias': 'business_proposition',
'urlaspattern': True,
'url': 'business_proposition_view startup.id'},
{"title": 'Progress',
'alias': 'progress',
'urlaspattern': True,
'url': 'startup_update_view startup.id'},
{"title": 'Team',
'alias': 'startup_team',
'urlaspattern': True,
'url': 'startup_team_view startup.id'},
]
nav_tree, _ = NavTree.objects.get_or_create(
alias='startup_profile_manager_subnav',
defaults={'title': 'Startup Profile Manager'})
for item in nav_items:
defaults = {
'title': item['title']
}
if item.get('urlaspattern', False):
defaults['urlaspattern'] = item['urlaspattern']
NavTreeItem.objects.get_or_create(
alias=item['alias'],
url=item['url'],
tree_id=nav_tree.id,
defaults=defaults)
class Migration(migrations.Migration):
dependencies = [
('accelerator',
'0105_inclusion_of_business_proposition_model_changes'),
]
operations = [
migrations.RunPython(add_startup_profile_top_nav,
migrations.RunPython.noop)
]
|
<commit_before><commit_msg>[AC-9512] Merge remote-tracking branch 'origin' into AC-9512<commit_after># Generated by Django 2.2.10 on 2021-11-24 16:41
import swapper
from django.db import migrations
def add_startup_profile_top_nav(apps, schema_editor):
NavTreeItem = apps.get_model('accelerator', 'NavTreeItem')
NavTree = swapper.load_model('accelerator', 'NavTree')
nav_items = [
{"title": 'My Startups',
'alias': 'my_startup',
'url': '/mystartups'},
{"title": 'Dashboard',
'alias': 'dashboard',
'urlaspattern': True,
'url': 'startup_dashboard startup.id'},
{"title": 'Company Profile',
'alias': 'company_profile',
'urlaspattern': True,
'url': 'startup_profile startup.id'},
{"title": 'Business Proposition',
'alias': 'business_proposition',
'urlaspattern': True,
'url': 'business_proposition_view startup.id'},
{"title": 'Progress',
'alias': 'progress',
'urlaspattern': True,
'url': 'startup_update_view startup.id'},
{"title": 'Team',
'alias': 'startup_team',
'urlaspattern': True,
'url': 'startup_team_view startup.id'},
]
nav_tree, _ = NavTree.objects.get_or_create(
alias='startup_profile_manager_subnav',
defaults={'title': 'Startup Profile Manager'})
for item in nav_items:
defaults = {
'title': item['title']
}
if item.get('urlaspattern', False):
defaults['urlaspattern'] = item['urlaspattern']
NavTreeItem.objects.get_or_create(
alias=item['alias'],
url=item['url'],
tree_id=nav_tree.id,
defaults=defaults)
class Migration(migrations.Migration):
dependencies = [
('accelerator',
'0105_inclusion_of_business_proposition_model_changes'),
]
operations = [
migrations.RunPython(add_startup_profile_top_nav,
migrations.RunPython.noop)
]
|
|
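As a quick sanity check on the data migration in this row, something like the following could be run in a Django shell after migrating. This is a sketch only: the `accelerator.models.NavTreeItem` import path is an assumption inferred from the migration code, not a confirmed project layout.

```python
# Hypothetical verification of the seeded nav tree; import paths assumed.
import swapper
from accelerator.models import NavTreeItem

NavTree = swapper.load_model('accelerator', 'NavTree')
tree = NavTree.objects.get(alias='startup_profile_manager_subnav')

# The migration seeds six items; entries with urlaspattern=True store a
# Django URL pattern name plus arguments instead of a literal path.
for item in NavTreeItem.objects.filter(tree_id=tree.id):
    print(item.alias, item.url)
```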
417b3caf2a64a68e0da56b3caa9213c2c64087c3
|
grammpy/Terminal.py
|
grammpy/Terminal.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
class Terminal:
def __init__(self, symbol, grammar):
self.__symbol = symbol
self.__grammar = grammar
def __hash__(self):
return hash((self.__symbol, id(self.__grammar)))
def __eq__(self, other):
return isinstance(other, Terminal) and hash(self) == hash(other)
|
Add class that represents a terminal symbol
|
Add class that represents a terminal symbol
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add class that represents a terminal symbol
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
class Terminal:
def __init__(self, symbol, grammar):
self.__symbol = symbol
self.__grammar = grammar
def __hash__(self):
return hash((self.__symbol, id(self.__grammar)))
def __eq__(self, other):
return isinstance(other, Terminal) and hash(self) == hash(other)
|
<commit_before><commit_msg>Add class that represents a terminal symbol<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
class Terminal:
def __init__(self, symbol, grammar):
self.__symbol = symbol
self.__grammar = grammar
def __hash__(self):
return hash((self.__symbol, id(self.__grammar)))
def __eq__(self, other):
return isinstance(other, Terminal) and hash(self) == hash(other)
|
Add class that represents a terminal symbol#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
class Terminal:
def __init__(self, symbol, grammar):
self.__symbol = symbol
self.__grammar = grammar
def __hash__(self):
return hash((self.__symbol, id(self.__grammar)))
def __eq__(self, other):
return isinstance(other, Terminal) and hash(self) == hash(other)
|
<commit_before><commit_msg>Add class that represents a terminal symbol<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
class Terminal:
def __init__(self, symbol, grammar):
self.__symbol = symbol
self.__grammar = grammar
def __hash__(self):
return hash((self.__symbol, id(self.__grammar)))
def __eq__(self, other):
return isinstance(other, Terminal) and hash(self) == hash(other)
|
|
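A quick illustration of the hash/equality contract defined above: two terminals compare equal only when they wrap the same symbol and the very same grammar object, because the grammar is hashed by `id()`. The `Grammar` stand-in below is hypothetical; grammpy's real grammar type does not appear in this row.

```python
class Grammar(object):
    pass  # stand-in for grammpy's grammar type

g = Grammar()
a = Terminal('x', g)
b = Terminal('x', g)
assert a == b and hash(a) == hash(b)  # same symbol, same grammar object
assert a != Terminal('x', Grammar())  # same symbol, different grammar
```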
2dc5b6f3790dba5528886f43bfa4240b9cc5c081
|
fuel_test/helpers.py
|
fuel_test/helpers.py
|
import logging
def execute(remote, command):
logging.debug("Executing command: '%s'" % command.rstrip())
chan = remote._ssh.get_transport().open_session()
stdin = chan.makefile('wb')
stdout = chan.makefile('rb')
stderr = chan.makefile_stderr('rb')
cmd = "%s\n" % command
if remote.sudo_mode:
cmd = 'sudo -S bash -c "%s"' % cmd.replace('"', '\\"')
chan.exec_command(cmd)
if stdout.channel.closed is False:
stdin.write('%s\n' % remote.password)
stdin.flush()
result = {
'stdout': [],
'stderr': [],
'exit_code': 0
}
for line in stdout:
result['stdout'].append(line)
print line
for line in stderr:
result['stderr'].append(line)
print line
result['exit_code'] = chan.recv_exit_status()
chan.close()
return result
|
Read stdout immediately and stderr too
|
Read stdout immediately and stderr too
|
Python
|
apache-2.0
|
xarses/fuel-library,slystopad/fuel-lib,ddepaoli3/fuel-library-dev,ddepaoli3/fuel-library-dev,eayunstack/fuel-library,zhaochao/fuel-library,Metaswitch/fuel-library,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,eayunstack/fuel-library,zhaochao/fuel-library,stackforge/fuel-library,slystopad/fuel-lib,eayunstack/fuel-library,slystopad/fuel-lib,huntxu/fuel-library,SmartInfrastructures/fuel-library-dev,Metaswitch/fuel-library,stackforge/fuel-library,zhaochao/fuel-library,ddepaoli3/fuel-library-dev,zhaochao/fuel-library,stackforge/fuel-library,Metaswitch/fuel-library,xarses/fuel-library,huntxu/fuel-library,huntxu/fuel-library,huntxu/fuel-library,Metaswitch/fuel-library,SmartInfrastructures/fuel-library-dev,SmartInfrastructures/fuel-library-dev,eayunstack/fuel-library,zhaochao/fuel-library,stackforge/fuel-library,slystopad/fuel-lib,SmartInfrastructures/fuel-library-dev,huntxu/fuel-library,xarses/fuel-library,eayunstack/fuel-library,ddepaoli3/fuel-library-dev,xarses/fuel-library
|
Read stdout immediately and stderr too
|
import logging
def execute(remote, command):
logging.debug("Executing command: '%s'" % command.rstrip())
chan = remote._ssh.get_transport().open_session()
stdin = chan.makefile('wb')
stdout = chan.makefile('rb')
stderr = chan.makefile_stderr('rb')
cmd = "%s\n" % command
if remote.sudo_mode:
cmd = 'sudo -S bash -c "%s"' % cmd.replace('"', '\\"')
chan.exec_command(cmd)
if stdout.channel.closed is False:
stdin.write('%s\n' % remote.password)
stdin.flush()
result = {
'stdout': [],
'stderr': [],
'exit_code': 0
}
for line in stdout:
result['stdout'].append(line)
print line
for line in stderr:
result['stderr'].append(line)
print line
result['exit_code'] = chan.recv_exit_status()
chan.close()
return result
|
<commit_before><commit_msg>Read stdout immediately and stderr too<commit_after>
|
import logging
def execute(remote, command):
logging.debug("Executing command: '%s'" % command.rstrip())
chan = remote._ssh.get_transport().open_session()
stdin = chan.makefile('wb')
stdout = chan.makefile('rb')
stderr = chan.makefile_stderr('rb')
cmd = "%s\n" % command
if remote.sudo_mode:
cmd = 'sudo -S bash -c "%s"' % cmd.replace('"', '\\"')
chan.exec_command(cmd)
if stdout.channel.closed is False:
stdin.write('%s\n' % remote.password)
stdin.flush()
result = {
'stdout': [],
'stderr': [],
'exit_code': 0
}
for line in stdout:
result['stdout'].append(line)
print line
for line in stderr:
result['stderr'].append(line)
print line
result['exit_code'] = chan.recv_exit_status()
chan.close()
return result
|
Read stdout immediately and stderr tooimport logging
def execute(remote, command):
logging.debug("Executing command: '%s'" % command.rstrip())
chan = remote._ssh.get_transport().open_session()
stdin = chan.makefile('wb')
stdout = chan.makefile('rb')
stderr = chan.makefile_stderr('rb')
cmd = "%s\n" % command
if remote.sudo_mode:
cmd = 'sudo -S bash -c "%s"' % cmd.replace('"', '\\"')
chan.exec_command(cmd)
if stdout.channel.closed is False:
stdin.write('%s\n' % remote.password)
stdin.flush()
result = {
'stdout': [],
'stderr': [],
'exit_code': 0
}
for line in stdout:
result['stdout'].append(line)
print line
for line in stderr:
result['stderr'].append(line)
print line
result['exit_code'] = chan.recv_exit_status()
chan.close()
return result
|
<commit_before><commit_msg>Read stdout immediately and stderr too<commit_after>import logging
def execute(remote, command):
logging.debug("Executing command: '%s'" % command.rstrip())
chan = remote._ssh.get_transport().open_session()
stdin = chan.makefile('wb')
stdout = chan.makefile('rb')
stderr = chan.makefile_stderr('rb')
cmd = "%s\n" % command
if remote.sudo_mode:
cmd = 'sudo -S bash -c "%s"' % cmd.replace('"', '\\"')
chan.exec_command(cmd)
if stdout.channel.closed is False:
stdin.write('%s\n' % remote.password)
stdin.flush()
result = {
'stdout': [],
'stderr': [],
'exit_code': 0
}
for line in stdout:
result['stdout'].append(line)
print line
for line in stderr:
result['stderr'].append(line)
print line
result['exit_code'] = chan.recv_exit_status()
chan.close()
return result
|
|
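For context, a minimal sketch of how a helper like `execute` above would be called. The `remote` object, with its `_ssh`, `sudo_mode`, and `password` attributes, is assumed to come from the surrounding test framework and is not defined in this row.

```python
# Hypothetical call site for the execute() helper above.
result = execute(remote, 'uname -a')
if result['exit_code'] != 0:
    logging.error("command failed:\n%s" % ''.join(result['stderr']))
```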
a1ae220c9fb6236bda7e16c09048f20482562cd0
|
migrations/versions/0350_update_rates.py
|
migrations/versions/0350_update_rates.py
|
"""
Revision ID: 0350_update_rates
Revises: 0349_add_ft_processing_time
Create Date: 2021-04-01 08:00:24.775338
"""
import uuid
from alembic import op
revision = '0350_update_rates'
down_revision = '0349_add_ft_processing_time'
def upgrade():
op.get_bind()
op.execute("INSERT INTO rates(id, valid_from, rate, notification_type) "
"VALUES('{}', '2021-03-31 23:00:00', 0.0160, 'sms')".format(uuid.uuid4()))
def downgrade():
pass
|
Add new SMS rate for April 1 2021.
|
Add new SMS rate for April 1 2021.
Downgrade script isn't necessary.
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add new SMS rate for April 1 2021.
Downgrade script isn't necessary.
|
"""
Revision ID: 0350_update_rates
Revises: 0349_add_ft_processing_time
Create Date: 2021-04-01 08:00:24.775338
"""
import uuid
from alembic import op
revision = '0350_update_rates'
down_revision = '0349_add_ft_processing_time'
def upgrade():
op.get_bind()
op.execute("INSERT INTO rates(id, valid_from, rate, notification_type) "
"VALUES('{}', '2021-03-31 23:00:00', 0.0160, 'sms')".format(uuid.uuid4()))
def downgrade():
pass
|
<commit_before><commit_msg>Add new SMS rate for April 1 2021.
Downgrade script isn't necessary.<commit_after>
|
"""
Revision ID: 0350_update_rates
Revises: 0349_add_ft_processing_time
Create Date: 2021-04-01 08:00:24.775338
"""
import uuid
from alembic import op
revision = '0350_update_rates'
down_revision = '0349_add_ft_processing_time'
def upgrade():
op.get_bind()
op.execute("INSERT INTO rates(id, valid_from, rate, notification_type) "
"VALUES('{}', '2021-03-31 23:00:00', 0.0160, 'sms')".format(uuid.uuid4()))
def downgrade():
pass
|
Add new SMS rate for April 1 2021.
Downgrade script isn't necessary."""
Revision ID: 0350_update_rates
Revises: 0349_add_ft_processing_time
Create Date: 2021-04-01 08:00:24.775338
"""
import uuid
from alembic import op
revision = '0350_update_rates'
down_revision = '0349_add_ft_processing_time'
def upgrade():
op.get_bind()
op.execute("INSERT INTO rates(id, valid_from, rate, notification_type) "
"VALUES('{}', '2021-03-31 23:00:00', 0.0160, 'sms')".format(uuid.uuid4()))
def downgrade():
pass
|
<commit_before><commit_msg>Add new SMS rate for April 1 2021.
Downgrade script isn't necessary.<commit_after>"""
Revision ID: 0350_update_rates
Revises: 0349_add_ft_processing_time
Create Date: 2021-04-01 08:00:24.775338
"""
import uuid
from alembic import op
revision = '0350_update_rates'
down_revision = '0349_add_ft_processing_time'
def upgrade():
op.get_bind()
op.execute("INSERT INTO rates(id, valid_from, rate, notification_type) "
"VALUES('{}', '2021-03-31 23:00:00', 0.0160, 'sms')".format(uuid.uuid4()))
def downgrade():
pass
|
|
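One stylistic note on the migration above: it interpolates the UUID into raw SQL via `str.format`. A sketch of the same insert with bound parameters (same assumed `rates` table and columns) avoids manual quoting; this is an alternative illustration, not the committed code.

```python
# Equivalent upgrade sketch using bound parameters instead of str.format.
import uuid
from alembic import op
from sqlalchemy.sql import text

def upgrade():
    conn = op.get_bind()
    conn.execute(
        text("INSERT INTO rates (id, valid_from, rate, notification_type) "
             "VALUES (:id, :valid_from, :rate, :ntype)"),
        {"id": str(uuid.uuid4()),
         "valid_from": "2021-03-31 23:00:00",
         "rate": 0.0160,
         "ntype": "sms"},
    )
```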
ceda2f092233393fba2cba8b414520ed10e82a7a
|
neblinaCore.py
|
neblinaCore.py
|
#!/usr/bin/env python
###################################################################################
#
# Copyright (c) 2010-2016 Motsai
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
###################################################################################
import threading
from neblina import *
###################################################################################
class NeblinaCore(object):
def __init__(self, interface=Interface.UART):
self.interface = interface
|
Create NeblinaCore to handle sending and receiving packet.
|
Create NeblinaCore to handle sending and receiving packet.
|
Python
|
mit
|
Motsai/neblina-python,Motsai/neblina-python
|
Create NeblinaCore to handle sending and receiving packet.
|
#!/usr/bin/env python
###################################################################################
#
# Copyright (c) 2010-2016 Motsai
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
###################################################################################
import threading
from neblina import *
###################################################################################
class NeblinaCore(object):
def __init__(self, interface=Interface.UART):
self.interface = interface
|
<commit_before><commit_msg>Create NeblinaCore to handle sending and receiving packet.<commit_after>
|
#!/usr/bin/env python
###################################################################################
#
# Copyright (c) 2010-2016 Motsai
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
###################################################################################
import threading
from neblina import *
###################################################################################
class NeblinaCore(object):
def __init__(self, interface=Interface.UART):
self.interface = interface
|
Create NeblinaCore to handle sending and receiving packet.#!/usr/bin/env python
###################################################################################
#
# Copyright (c) 2010-2016 Motsai
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
###################################################################################
import threading
from neblina import *
###################################################################################
class NeblinaCore(object):
def __init__(self, interface=Interface.UART):
self.interface = interface
|
<commit_before><commit_msg>Create NeblinaCore to handle sending and receiving packet.<commit_after>#!/usr/bin/env python
###################################################################################
#
# Copyright (c) 2010-2016 Motsai
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
###################################################################################
import threading
from neblina import *
###################################################################################
class NeblinaCore(object):
def __init__(self, interface=Interface.UART):
self.interface = interface
|
|
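A trivial usage sketch for the stub above; `Interface.UART` is assumed to be supplied by the wildcard `neblina` import, since it is referenced in the constructor's default.

```python
# Hypothetical: Interface comes from `from neblina import *` above.
core = NeblinaCore()  # defaults to Interface.UART
uart = NeblinaCore(interface=Interface.UART)
assert core.interface == uart.interface
```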
88de95b8582abd7ff2f736212411129d9ad941f0
|
tools/privacyidea-export-privacyidea-counter.py
|
tools/privacyidea-export-privacyidea-counter.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# 2018-05-27 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# init
__doc__ = """
This script exports counters from privacyIDEA to a csv file.
It exports
serial, counter
"""
import argparse
import sys
from sqlalchemy import create_engine
from sqlalchemy import Table, MetaData, Column
from sqlalchemy import Integer, Unicode, Boolean
from sqlalchemy.sql import select
from privacyidea.models import Token
metadata = MetaData()
def get_privacyidea_uri(config_file):
with open(config_file) as f:
content = f.readlines()
lines = [l.strip() for l in content]
sql_uri = ""
for line in lines:
if line.startswith("SQLALCHEMY_DATABASE_URI"):
sql_uri = line.split("=", 1)[1].strip().strip("'").strip('"')
return sql_uri
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-c", "--config", help="privacyIDEA config file. We only need the SQLALCHEMY_DATABASE_URI.",
required=True)
args = parser.parse_args()
# Parse data
SQL_URI = get_privacyidea_uri(args.config)
# Start DB stuff
pi_engine = create_engine(SQL_URI)
conn_pi = pi_engine.connect()
s = select([Token.serial, Token.count])
result = pi_engine.execute(s)
for r in result:
print(u"{0!s}, {1!s}".format(r.serial, r.count))
|
Add a script that exports OTP counters from privacyIDEA
|
Add a script that exports OTP counters from privacyIDEA
Just for the sake of completeness, I add this script
that helped in different migration scenarios.
It exports only the counter values from all tokens
in the database. (relevant for HOTP and TOTP)
|
Python
|
agpl-3.0
|
privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea
|
Add a script that exports OTP counters from privacyIDEA
Just for the sake of completeness, I add this script
that helped in different migration scenarios.
It exports only the counter values from all tokens
in the database. (relevant for HOTP and TOTP)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# 2018-05-27 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# init
__doc__ = """
This script exports counters from privacyIDEA to a csv file.
It exports
serial, counter
"""
import argparse
import sys
from sqlalchemy import create_engine
from sqlalchemy import Table, MetaData, Column
from sqlalchemy import Integer, Unicode, Boolean
from sqlalchemy.sql import select
from privacyidea.models import Token
metadata = MetaData()
def get_privacyidea_uri(config_file):
with open(config_file) as f:
content = f.readlines()
lines = [l.strip() for l in content]
sql_uri = ""
for line in lines:
if line.startswith("SQLALCHEMY_DATABASE_URI"):
sql_uri = line.split("=", 1)[1].strip().strip("'").strip('"')
return sql_uri
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-c", "--config", help="privacyIDEA config file. We only need the SQLALCHEMY_DATABASE_URI.",
required=True)
args = parser.parse_args()
# Parse data
SQL_URI = get_privacyidea_uri(args.config)
# Start DB stuff
pi_engine = create_engine(SQL_URI)
conn_pi = pi_engine.connect()
s = select([Token.serial, Token.count])
result = pi_engine.execute(s)
for r in result:
print(u"{0!s}, {1!s}".format(r.serial, r.count))
|
<commit_before><commit_msg>Add a script that exports OTP counters from privacyIDEA
Just for the sake of completeness, I add this script
that helped in different migration scenarios.
It exports only the counter values from all tokens
in the database. (relevant for HOTP and TOTP)<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# 2018-05-27 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# init
__doc__ = """
This script exports counters from privacyIDEA to a csv file.
It exports
serial, counter
"""
import argparse
import sys
from sqlalchemy import create_engine
from sqlalchemy import Table, MetaData, Column
from sqlalchemy import Integer, Unicode, Boolean
from sqlalchemy.sql import select
from privacyidea.models import Token
metadata = MetaData()
def get_privacyidea_uri(config_file):
with open(config_file) as f:
content = f.readlines()
lines = [l.strip() for l in content]
sql_uri = ""
for line in lines:
if line.startswith("SQLALCHEMY_DATABASE_URI"):
sql_uri = line.split("=", 1)[1].strip().strip("'").strip('"')
return sql_uri
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-c", "--config", help="privacyIDEA config file. We only need the SQLALCHEMY_DATABASE_URI.",
required=True)
args = parser.parse_args()
# Parse data
SQL_URI = get_privacyidea_uri(args.config)
# Start DB stuff
pi_engine = create_engine(SQL_URI)
conn_pi = pi_engine.connect()
s = select([Token.serial, Token.count])
result = pi_engine.execute(s)
for r in result:
print(u"{0!s}, {1!s}".format(r.serial, r.count))
|
Add a script that exports OTP counters from privacyIDEA
Just for the sake of completeness, I add this script
that helped in different migration scenarios.
It exports only the counter values from all tokens
in the database. (relevant for HOTP and TOTP)#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# 2018-05-27 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# init
__doc__ = """
This script exports counters from privacyIDEA to a csv file.
It exports
serial, counter
"""
import argparse
import sys
from sqlalchemy import create_engine
from sqlalchemy import Table, MetaData, Column
from sqlalchemy import Integer, Unicode, Boolean
from sqlalchemy.sql import select
from privacyidea.models import Token
metadata = MetaData()
def get_privacyidea_uri(config_file):
with open(config_file) as f:
content = f.readlines()
lines = [l.strip() for l in content]
sql_uri = ""
for line in lines:
if line.startswith("SQLALCHEMY_DATABASE_URI"):
sql_uri = line.split("=", 1)[1].strip().strip("'").strip('"')
return sql_uri
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-c", "--config", help="privacyIDEA config file. We only need the SQLALCHEMY_DATABASE_URI.",
required=True)
args = parser.parse_args()
# Parse data
SQL_URI = get_privacyidea_uri(args.config)
# Start DB stuff
pi_engine = create_engine(SQL_URI)
conn_pi = pi_engine.connect()
s = select([Token.serial, Token.count])
result = pi_engine.execute(s)
for r in result:
print(u"{0!s}, {1!s}".format(r.serial, r.count))
|
<commit_before><commit_msg>Add a script that exports OTP counters from privacyIDEA
Just for the sake of completeness, I add this script
that helped in different migration scenarios.
It exports only the counter values from all tokens
in the database. (relevant for HOTP and TOTP)<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# 2018-05-27 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# init
__doc__ = """
This script exports counters from privacyIDEA to a csv file.
It exports
serial, counter
"""
import argparse
import sys
from sqlalchemy import create_engine
from sqlalchemy import Table, MetaData, Column
from sqlalchemy import Integer, Unicode, Boolean
from sqlalchemy.sql import select
from privacyidea.models import Token
metadata = MetaData()
def get_privacyidea_uri(config_file):
with open(config_file) as f:
content = f.readlines()
lines = [l.strip() for l in content]
sql_uri = ""
for line in lines:
if line.startswith("SQLALCHEMY_DATABASE_URI"):
sql_uri = line.split("=", 1)[1].strip().strip("'").strip('"')
return sql_uri
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-c", "--config", help="privacyIDEA config file. We only need the SQLALCHEMY_DATABASE_URI.",
required=True)
args = parser.parse_args()
# Parse data
SQL_URI = get_privacyidea_uri(args.config)
# Start DB stuff
pi_engine = create_engine(SQL_URI)
conn_pi = pi_engine.connect()
s = select([Token.serial, Token.count])
result = pi_engine.execute(s)
for r in result:
print(u"{0!s}, {1!s}".format(r.serial, r.count))
|
|
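The config parsing in `get_privacyidea_uri` above is plain string handling: split on the first `=`, then strip whitespace and either quote style. An isolated illustration (the sample URI is made up):

```python
# Mirrors the parsing inside get_privacyidea_uri() above.
line = "SQLALCHEMY_DATABASE_URI = 'mysql://pi:secret@localhost/pi'"
sql_uri = line.split("=", 1)[1].strip().strip("'").strip('"')
assert sql_uri == "mysql://pi:secret@localhost/pi"
```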
07cb5fdc80214cebcdc794b24cc4bd853835b4f7
|
Release/Tests/AnalysisTest/Python.VS.TestData/DebuggerProject/EnumChildTestV3.py
|
Release/Tests/AnalysisTest/Python.VS.TestData/DebuggerProject/EnumChildTestV3.py
|
d1 = {42 : 100}
d2 = {'abc' : 'foo'}
s = set([frozenset([2,3,4])])
class C(object):
abc = 42
def f(self): pass
cinst = C()
class C2(object):
abc = 42
def __init__(self):
self.bar = 100
self.self = self
def __repr__(self):
return 'myrepr'
def __hex__(self):
return 'myhex'
def f(self): pass
c2inst = C2()
l = [1, 2, ]
i = 3
pass
|
Add 1 more missing file
|
Add 1 more missing file
|
Python
|
apache-2.0
|
Habatchii/PTVS,modulexcite/PTVS,christer155/PTVS,crwilcox/PTVS,int19h/PTVS,dut3062796s/PTVS,modulexcite/PTVS,gilbertw/PTVS,Microsoft/PTVS,zooba/PTVS,bolabola/PTVS,crwilcox/PTVS,MetSystem/PTVS,modulexcite/PTVS,bolabola/PTVS,juanyaw/PTVS,ChinaQuants/PTVS,zooba/PTVS,msunardi/PTVS,gilbertw/PTVS,mlorbetske/PTVS,denfromufa/PTVS,jkorell/PTVS,msunardi/PTVS,MetSystem/PTVS,mlorbetske/PTVS,Habatchii/PTVS,alanch-ms/PTVS,ChinaQuants/PTVS,fivejjs/PTVS,alanch-ms/PTVS,modulexcite/PTVS,gilbertw/PTVS,christer155/PTVS,dut3062796s/PTVS,int19h/PTVS,xNUTs/PTVS,DinoV/PTVS,jkorell/PTVS,int19h/PTVS,Microsoft/PTVS,gomiero/PTVS,xNUTs/PTVS,DEVSENSE/PTVS,alanch-ms/PTVS,msunardi/PTVS,jkorell/PTVS,fjxhkj/PTVS,DinoV/PTVS,Habatchii/PTVS,alanch-ms/PTVS,zooba/PTVS,jkorell/PTVS,gilbertw/PTVS,modulexcite/PTVS,dut3062796s/PTVS,zooba/PTVS,Habatchii/PTVS,fjxhkj/PTVS,jkorell/PTVS,denfromufa/PTVS,denfromufa/PTVS,denfromufa/PTVS,jkorell/PTVS,huguesv/PTVS,Microsoft/PTVS,huguesv/PTVS,MetSystem/PTVS,DinoV/PTVS,christer155/PTVS,fivejjs/PTVS,huguesv/PTVS,Habatchii/PTVS,christer155/PTVS,alanch-ms/PTVS,MetSystem/PTVS,dut3062796s/PTVS,fjxhkj/PTVS,Microsoft/PTVS,gilbertw/PTVS,gomiero/PTVS,MetSystem/PTVS,DinoV/PTVS,bolabola/PTVS,crwilcox/PTVS,modulexcite/PTVS,crwilcox/PTVS,xNUTs/PTVS,DinoV/PTVS,zooba/PTVS,gilbertw/PTVS,msunardi/PTVS,DEVSENSE/PTVS,int19h/PTVS,juanyaw/PTVS,fjxhkj/PTVS,juanyaw/PTVS,gomiero/PTVS,huguesv/PTVS,gomiero/PTVS,bolabola/PTVS,xNUTs/PTVS,mlorbetske/PTVS,juanyaw/PTVS,int19h/PTVS,Microsoft/PTVS,DEVSENSE/PTVS,christer155/PTVS,bolabola/PTVS,bolabola/PTVS,alanch-ms/PTVS,fjxhkj/PTVS,crwilcox/PTVS,juanyaw/PTVS,MetSystem/PTVS,DEVSENSE/PTVS,denfromufa/PTVS,msunardi/PTVS,juanyaw/PTVS,Habatchii/PTVS,christer155/PTVS,denfromufa/PTVS,DEVSENSE/PTVS,Microsoft/PTVS,DinoV/PTVS,DEVSENSE/PTVS,gomiero/PTVS,mlorbetske/PTVS,xNUTs/PTVS,fivejjs/PTVS,xNUTs/PTVS,msunardi/PTVS,huguesv/PTVS,gomiero/PTVS,ChinaQuants/PTVS,mlorbetske/PTVS,fivejjs/PTVS,crwilcox/PTVS,int19h/PTVS,zooba/PTVS,ChinaQuants/PTVS,mlorbetske/PTVS,huguesv/PTVS,fivejjs/PTVS,fjxhkj/PTVS,dut3062796s/PTVS,ChinaQuants/PTVS,fivejjs/PTVS,dut3062796s/PTVS,ChinaQuants/PTVS
|
Add 1 more missing file
|
d1 = {42 : 100}
d2 = {'abc' : 'foo'}
s = set([frozenset([2,3,4])])
class C(object):
abc = 42
def f(self): pass
cinst = C()
class C2(object):
abc = 42
def __init__(self):
self.bar = 100
self.self = self
def __repr__(self):
return 'myrepr'
def __hex__(self):
return 'myhex'
def f(self): pass
c2inst = C2()
l = [1, 2, ]
i = 3
pass
|
<commit_before><commit_msg>Add 1 more missing file<commit_after>
|
d1 = {42 : 100}
d2 = {'abc' : 'foo'}
s = set([frozenset([2,3,4])])
class C(object):
abc = 42
def f(self): pass
cinst = C()
class C2(object):
abc = 42
def __init__(self):
self.bar = 100
self.self = self
def __repr__(self):
return 'myrepr'
def __hex__(self):
return 'myhex'
def f(self): pass
c2inst = C2()
l = [1, 2, ]
i = 3
pass
|
Add 1 more missing filed1 = {42 : 100}
d2 = {'abc' : 'foo'}
s = set([frozenset([2,3,4])])
class C(object):
abc = 42
def f(self): pass
cinst = C()
class C2(object):
abc = 42
def __init__(self):
self.bar = 100
self.self = self
def __repr__(self):
return 'myrepr'
def __hex__(self):
return 'myhex'
def f(self): pass
c2inst = C2()
l = [1, 2, ]
i = 3
pass
|
<commit_before><commit_msg>Add 1 more missing file<commit_after>d1 = {42 : 100}
d2 = {'abc' : 'foo'}
s = set([frozenset([2,3,4])])
class C(object):
abc = 42
def f(self): pass
cinst = C()
class C2(object):
abc = 42
def __init__(self):
self.bar = 100
self.self = self
def __repr__(self):
return 'myrepr'
def __hex__(self):
return 'myhex'
def f(self): pass
c2inst = C2()
l = [1, 2, ]
i = 3
pass
|
|
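One detail worth noting in the debugger fixture above: `__hex__` is only consulted by the built-in `hex()` in Python 2; Python 3 dropped that hook in favor of `__index__`. A quick illustration in Python 2, matching the fixture's vintage:

```python
# Python 2 semantics: hex() dispatches to C2.__hex__.
print hex(c2inst)   # -> myhex
print repr(c2inst)  # -> myrepr
```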
79723a38a1905da020effe0b5b1edf98b9a5351b
|
py/non-negative-integers-without-consecutive-ones.py
|
py/non-negative-integers-without-consecutive-ones.py
|
class Solution(object):
def findIntegers(self, num):
"""
:type num: int
:rtype: int
"""
for t in xrange(num.bit_length() - 1, 0, -1):
if ((1 << t) & num) and (1 << (t - 1) & num):
flag = True
for i in xrange(t, -1, -1):
if flag:
num |= (1 << i)
else:
num &= ~(1 << i)
flag = not flag
break
a, b = 1, 1
ans = 0
while num > 0:
if num & 1:
ans += a
num >>= 1
a, b = a + b, a
return ans + 1
|
Add py solution for 600. Non-negative Integers without Consecutive Ones
|
Add py solution for 600. Non-negative Integers without Consecutive Ones
600. Non-negative Integers without Consecutive Ones: https://leetcode.com/problems/non-negative-integers-without-consecutive-ones/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 600. Non-negative Integers without Consecutive Ones
600. Non-negative Integers without Consecutive Ones: https://leetcode.com/problems/non-negative-integers-without-consecutive-ones/
|
class Solution(object):
def findIntegers(self, num):
"""
:type num: int
:rtype: int
"""
for t in xrange(num.bit_length() - 1, 0, -1):
if ((1 << t) & num) and (1 << (t - 1) & num):
flag = True
for i in xrange(t, -1, -1):
if flag:
num |= (1 << i)
else:
num &= ~(1 << i)
flag = not flag
break
a, b = 1, 1
ans = 0
while num > 0:
if num & 1:
ans += a
num >>= 1
a, b = a + b, a
return ans + 1
|
<commit_before><commit_msg>Add py solution for 600. Non-negative Integers without Consecutive Ones
600. Non-negative Integers without Consecutive Ones: https://leetcode.com/problems/non-negative-integers-without-consecutive-ones/<commit_after>
|
class Solution(object):
def findIntegers(self, num):
"""
:type num: int
:rtype: int
"""
for t in xrange(num.bit_length() - 1, 0, -1):
if ((1 << t) & num) and (1 << (t - 1) & num):
flag = True
for i in xrange(t, -1, -1):
if flag:
num |= (1 << i)
else:
num &= ~(1 << i)
flag = not flag
break
a, b = 1, 1
ans = 0
while num > 0:
if num & 1:
ans += a
num >>= 1
a, b = a + b, a
return ans + 1
|
Add py solution for 600. Non-negative Integers without Consecutive Ones
600. Non-negative Integers without Consecutive Ones: https://leetcode.com/problems/non-negative-integers-without-consecutive-ones/class Solution(object):
def findIntegers(self, num):
"""
:type num: int
:rtype: int
"""
for t in xrange(num.bit_length() - 1, 0, -1):
if ((1 << t) & num) and (1 << (t - 1) & num):
flag = True
for i in xrange(t, -1, -1):
if flag:
num |= (1 << i)
else:
num &= ~(1 << i)
flag = not flag
break
a, b = 1, 1
ans = 0
while num > 0:
if num & 1:
ans += a
num >>= 1
a, b = a + b, a
return ans + 1
|
<commit_before><commit_msg>Add py solution for 600. Non-negative Integers without Consecutive Ones
600. Non-negative Integers without Consecutive Ones: https://leetcode.com/problems/non-negative-integers-without-consecutive-ones/<commit_after>class Solution(object):
def findIntegers(self, num):
"""
:type num: int
:rtype: int
"""
for t in xrange(num.bit_length() - 1, 0, -1):
if ((1 << t) & num) and (1 << (t - 1) & num):
flag = True
for i in xrange(t, -1, -1):
if flag:
num |= (1 << i)
else:
num &= ~(1 << i)
flag = not flag
break
a, b = 1, 1
ans = 0
while num > 0:
if num & 1:
ans += a
num >>= 1
a, b = a + b, a
return ans + 1
|
|
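The counting loop above is Fibonacci-shaped: `a` holds the number of valid bit-suffixes of the current length and `b` the count one bit shorter, so `a, b = a + b, a` advances f(n) = f(n-1) + f(n-2). A brute-force cross-check for small inputs (illustrative only, Python 2 like the solution itself):

```python
# Brute force: count x in [0, num] whose binary form has no '11'.
def brute(num):
    return sum(1 for x in xrange(num + 1) if '11' not in bin(x))

s = Solution()
for n in [0, 1, 5, 8, 100]:
    assert s.findIntegers(n) == brute(n)
```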
dddd91e030a6c9caa1d431ce95b07125f90675a9
|
features/subscriptions/migrations/0014_auto_20171102_1045.py
|
features/subscriptions/migrations/0014_auto_20171102_1045.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-02 09:45
from __future__ import unicode_literals
from django.db import migrations
from django.db.transaction import atomic
from django.db.utils import IntegrityError
def add_subscriptions_for_memberships(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Membership = apps.get_model('memberships', 'Membership')
Subscription = apps.get_model('subscriptions', 'Subscription')
for membership in Membership.objects.all():
try:
with atomic():
Subscription.objects.create(
subscribed_to_id=membership.group.id,
subscribed_to_type=ContentType.objects.get_for_model(membership.group),
subscriber=membership.member)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('memberships', '0014_auto_20170609_1029'),
('subscriptions', '0013_auto_20170918_1340'),
]
operations = [
migrations.RunPython(add_subscriptions_for_memberships),
]
|
Add subscriptions for all memberships
|
Add subscriptions for all memberships
|
Python
|
agpl-3.0
|
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
|
Add subscriptions for all memberships
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-02 09:45
from __future__ import unicode_literals
from django.db import migrations
from django.db.transaction import atomic
from django.db.utils import IntegrityError
def add_subscriptions_for_memberships(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Membership = apps.get_model('memberships', 'Membership')
Subscription = apps.get_model('subscriptions', 'Subscription')
for membership in Membership.objects.all():
try:
with atomic():
Subscription.objects.create(
subscribed_to_id=membership.group.id,
subscribed_to_type=ContentType.objects.get_for_model(membership.group),
subscriber=membership.member)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('memberships', '0014_auto_20170609_1029'),
('subscriptions', '0013_auto_20170918_1340'),
]
operations = [
migrations.RunPython(add_subscriptions_for_memberships),
]
|
<commit_before><commit_msg>Add subscriptions for all memberships<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-02 09:45
from __future__ import unicode_literals
from django.db import migrations
from django.db.transaction import atomic
from django.db.utils import IntegrityError
def add_subscriptions_for_memberships(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Membership = apps.get_model('memberships', 'Membership')
Subscription = apps.get_model('subscriptions', 'Subscription')
for membership in Membership.objects.all():
try:
with atomic():
Subscription.objects.create(
subscribed_to_id=membership.group.id,
subscribed_to_type=ContentType.objects.get_for_model(membership.group),
subscriber=membership.member)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('memberships', '0014_auto_20170609_1029'),
('subscriptions', '0013_auto_20170918_1340'),
]
operations = [
migrations.RunPython(add_subscriptions_for_memberships),
]
|
Add subscriptions for all memberships# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-02 09:45
from __future__ import unicode_literals
from django.db import migrations
from django.db.transaction import atomic
from django.db.utils import IntegrityError
def add_subscriptions_for_memberships(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Membership = apps.get_model('memberships', 'Membership')
Subscription = apps.get_model('subscriptions', 'Subscription')
for membership in Membership.objects.all():
try:
with atomic():
Subscription.objects.create(
subscribed_to_id=membership.group.id,
subscribed_to_type=ContentType.objects.get_for_model(membership.group),
subscriber=membership.member)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('memberships', '0014_auto_20170609_1029'),
('subscriptions', '0013_auto_20170918_1340'),
]
operations = [
migrations.RunPython(add_subscriptions_for_memberships),
]
|
<commit_before><commit_msg>Add subscriptions for all memberships<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-02 09:45
from __future__ import unicode_literals
from django.db import migrations
from django.db.transaction import atomic
from django.db.utils import IntegrityError
def add_subscriptions_for_memberships(apps, schema_editor):
ContentType = apps.get_model('contenttypes', 'ContentType')
Membership = apps.get_model('memberships', 'Membership')
Subscription = apps.get_model('subscriptions', 'Subscription')
for membership in Membership.objects.all():
try:
with atomic():
Subscription.objects.create(
subscribed_to_id=membership.group.id,
subscribed_to_type=ContentType.objects.get_for_model(membership.group),
subscriber=membership.member)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('memberships', '0014_auto_20170609_1029'),
('subscriptions', '0013_auto_20170918_1340'),
]
operations = [
migrations.RunPython(add_subscriptions_for_memberships),
]
|
|
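The per-row `with atomic()` block in the migration above is what lets the loop skip duplicates safely: on PostgreSQL, an IntegrityError raised outside a savepoint would abort the whole surrounding migration transaction. The same pattern in isolation (generic sketch, not project code):

```python
from django.db.transaction import atomic
from django.db.utils import IntegrityError

def create_ignoring_conflicts(model, rows):
    # Each insert gets its own savepoint, so a unique-constraint
    # conflict rolls back only that row and the loop continues.
    for row in rows:
        try:
            with atomic():
                model.objects.create(**row)
        except IntegrityError:
            pass
```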
efcd7e49718867362d43eb651c233f6f0dd183cf
|
senlin_dashboard/test/api_tests/senlin_rest_tests.py
|
senlin_dashboard/test/api_tests/senlin_rest_tests.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from openstack_dashboard.test import helpers as test
from senlin_dashboard.api.rest import senlin
class SenlinRestTestCase(test.TestCase):
#
# Receiver
#
@mock.patch.object(senlin, 'senlin')
def test_receivers_get(self, client):
request = self.mock_rest_request(**{'GET': {}})
client.receiver_list.return_value = ([
mock.Mock(**{'to_dict.return_value': {'id': 'one'}}),
mock.Mock(**{'to_dict.return_value': {'id': 'two'}}),
], False, True)
response = senlin.Receivers().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(len(response.json['items']), 2)
self.assertEqual(response.json['has_more_data'], False)
self.assertEqual(response.json['has_prev_data'], True)
@mock.patch.object(senlin, 'senlin')
def test_receiver_get_single(self, client):
request = self.mock_rest_request()
client.receiver_get.return_value.to_dict.return_value = {
'name': 'test-receiver'}
response = senlin.Receiver().get(request, '1')
self.assertStatusCode(response, 200)
self.assertEqual(response.json['name'], 'test-receiver')
@mock.patch.object(senlin, 'senlin')
def test_receiver_delete(self, client):
request = self.mock_rest_request()
senlin.Receiver().delete(request, "1")
client.receiver_delete.assert_called_once_with(request, "1")
|
Add unit test for senlin-dashboard rest api
|
Add unit test for senlin-dashboard rest api
Change-Id: Id0372305d71063b0ef029c1c7fb1dac516ef088f
|
Python
|
apache-2.0
|
openstack/senlin-dashboard,stackforge/senlin-dashboard,stackforge/senlin-dashboard,openstack/senlin-dashboard,stackforge/senlin-dashboard,openstack/senlin-dashboard,openstack/senlin-dashboard
|
Add unit test for senlin-dashboard rest api
Change-Id: Id0372305d71063b0ef029c1c7fb1dac516ef088f
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from openstack_dashboard.test import helpers as test
from senlin_dashboard.api.rest import senlin
class SenlinRestTestCase(test.TestCase):
#
# Receiver
#
@mock.patch.object(senlin, 'senlin')
def test_receivers_get(self, client):
request = self.mock_rest_request(**{'GET': {}})
client.receiver_list.return_value = ([
mock.Mock(**{'to_dict.return_value': {'id': 'one'}}),
mock.Mock(**{'to_dict.return_value': {'id': 'two'}}),
], False, True)
response = senlin.Receivers().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(len(response.json['items']), 2)
self.assertEqual(response.json['has_more_data'], False)
self.assertEqual(response.json['has_prev_data'], True)
@mock.patch.object(senlin, 'senlin')
def test_receiver_get_single(self, client):
request = self.mock_rest_request()
client.receiver_get.return_value.to_dict.return_value = {
'name': 'test-receiver'}
response = senlin.Receiver().get(request, '1')
self.assertStatusCode(response, 200)
self.assertEqual(response.json['name'], 'test-receiver')
@mock.patch.object(senlin, 'senlin')
def test_receiver_delete(self, client):
request = self.mock_rest_request()
senlin.Receiver().delete(request, "1")
client.receiver_delete.assert_called_once_with(request, "1")
|
<commit_before><commit_msg>Add unit test for senlin-dashboard rest api
Change-Id: Id0372305d71063b0ef029c1c7fb1dac516ef088f<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from openstack_dashboard.test import helpers as test
from senlin_dashboard.api.rest import senlin
class SenlinRestTestCase(test.TestCase):
#
# Receiver
#
@mock.patch.object(senlin, 'senlin')
def test_receivers_get(self, client):
request = self.mock_rest_request(**{'GET': {}})
client.receiver_list.return_value = ([
mock.Mock(**{'to_dict.return_value': {'id': 'one'}}),
mock.Mock(**{'to_dict.return_value': {'id': 'two'}}),
], False, True)
response = senlin.Receivers().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(len(response.json['items']), 2)
self.assertEqual(response.json['has_more_data'], False)
self.assertEqual(response.json['has_prev_data'], True)
@mock.patch.object(senlin, 'senlin')
def test_receiver_get_single(self, client):
request = self.mock_rest_request()
client.receiver_get.return_value.to_dict.return_value = {
'name': 'test-receiver'}
response = senlin.Receiver().get(request, '1')
self.assertStatusCode(response, 200)
self.assertEqual(response.json['name'], 'test-receiver')
@mock.patch.object(senlin, 'senlin')
def test_receiver_delete(self, client):
request = self.mock_rest_request()
senlin.Receiver().delete(request, "1")
client.receiver_delete.assert_called_once_with(request, "1")
|
|
0eb860c2bfbc015970fef517e835a49dd84812db
|
rcamp/mailer/migrations/0002_allocation_expiration_mailer_field_choices.py
|
rcamp/mailer/migrations/0002_allocation_expiration_mailer_field_choices.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mailer', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='mailnotifier',
name='event',
field=models.CharField(max_length=128, choices=[(b'account_created_from_request', b'account_created_from_request'), (b'account_request_received', b'account_request_received'), (b'allocation_created_from_request', b'allocation_created_from_request'), (b'allocation_expired', b'allocation_expired'), (b'allocation_expiring', b'allocation_expiring'), (b'allocation_request_created_by_user', b'allocation_request_created_by_user'), (b'project_created_by_user', b'project_created_by_user')]),
),
]
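# Note (illustrative): `choices` is enforced by Django model/form validation,
# not by a database constraint, so this AlterField mainly keeps migration state
# in sync. Assuming the model class is MailNotifier (a guess derived from
# model_name='mailnotifier'):
#   MailNotifier(event=b'bogus_event').full_clean()  # would raise ValidationError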
|
Add db migration: expiration/expiring choices for mailer
|
Add db migration: expiration/expiring choices for mailer
|
Python
|
mit
|
ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP
|
|
9d341072b6ebbd84380dd1ef8071d313244acc88
|
cryptchat/test/test_diffiehellman.py
|
cryptchat/test/test_diffiehellman.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m cryptchat.test.test_diffiehellman
import unittest
from ..crypto.diffiehellman import DiffieHellman
class testDiffieHellman(unittest.TestCase):
def test_initiatevalid(self):
alice = DiffieHellman(group=5)
self.assertTrue(alice.keysize == 240)
alice = DiffieHellman(group=17)
self.assertTrue(alice.keysize == 540)
alice = DiffieHellman(group=18)
self.assertTrue(alice.keysize == 620)
def main():
unittest.main()
if __name__ == '__main__':
main()
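# For context, the exchange this class implements, sketched with toy numbers
# (illustrative only -- not this repo's API):
p, g = 23, 5                           # public prime and generator (toy-sized)
a, b = 6, 15                           # each party's private exponent
A, B = pow(g, a, p), pow(g, b, p)      # exchanged public keys
assert pow(B, a, p) == pow(A, b, p)    # both sides derive the same shared secret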
|
Add basic test for diffiehellman
|
Add basic test for diffiehellman
|
Python
|
mit
|
djohsson/Cryptchat
|
|
21572b5bef09bd60bd61b68241a155ddb2dd4444
|
tests/test_children.py
|
tests/test_children.py
|
# coding: pyxl
from pyxl import html
def test_no_filter():
children = <div><p>One</p><p>Two</p></div>.children()
assert len(children) == 2
assert str(children[0]) == '<p>One</p>'
assert str(children[1]) == '<p>Two</p>'
def test_class_filter():
div = <div><p class="yo">Hi</p><p>No</p><p class="yo">ho</p></div>
children = div.children(selector='.yo')
assert len(children) == 2
assert str(children[0]) == '<p class="yo">Hi</p>'
assert str(children[1]) == '<p class="yo">ho</p>'
def test_id_filter():
div = <div><p id="yo">Hi</p><p>No</p><p id="ho">ho</p></div>
children = div.children(selector='#yo')
assert len(children) == 1
assert str(children[0]) == '<p id="yo">Hi</p>'
def test_tag_filter():
div = <div><div><p>Yo</p></div><p>Hi</p></div>
children = div.children('p')
assert len(children) == 1
assert str(children[0]) == '<p>Hi</p>'
def test_filter_negation():
div = <div><p class="yo">Hi</p><p>No</p><p class="ho">ho</p></div>
children = div.children(selector='.yo', exclude=True)
assert len(children) == 2
assert str(children[0]) == '<p>No</p>'
assert str(children[1]) == '<p class="ho">ho</p>'
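# One more use of the same API (illustrative): matched children stringify back
# to their markup.
assert str(<div><p>Hi</p></div>.children('p')[0]) == '<p>Hi</p>'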
|
Add tests for children method.
|
Add tests for children method.
- all children are returned
- filter by tag
- filter by id
- filter by class
- negate filter (exclude)
|
Python
|
apache-2.0
|
pyxl4/pyxl4
|
|
119196cc7d3932c09d5125d1d2f02fd0e6964b43
|
py/contiguous-array.py
|
py/contiguous-array.py
|
class Solution(object):
def findMaxLength(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
pos = {0: -1}
v = 0
ans = 0
for i, n in enumerate(nums):
v += n * 2 - 1
if v in pos:
ans = max(ans, i - pos[v])
else:
pos[v] = i
return ans
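# Why it works: mapping 0 -> -1 and 1 -> +1 makes every balanced subarray sum to
# zero, so the running balance v repeats; pos remembers where each balance first
# appeared. Quick checks (illustrative):
assert Solution().findMaxLength([0, 1, 0]) == 2
assert Solution().findMaxLength([0, 1, 1, 0]) == 4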
|
Add py solution for 525. Contiguous Array
|
Add py solution for 525. Contiguous Array
525. Contiguous Array: https://leetcode.com/problems/contiguous-array/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
|
70ba2d36dfcd0613096b8b41c15c34bacd8b72e8
|
tests/test_sscanf.py
|
tests/test_sscanf.py
|
import nose
import angr
import subprocess
import logging
l = logging.getLogger('angr.tests.sscanf')
import os
test_location = str(os.path.dirname(os.path.realpath(__file__)))
def run_sscanf(threads):
test_bin = os.path.join(test_location, "../../binaries/tests/x86_64/sscanf_test")
b = angr.Project(test_bin)
pg = b.factory.path_group(immutable=False, threads=threads)
# find the end of main
expected_outputs = {
"0x worked\n", "+0x worked\n", "base +16 worked\n", "base 16 worked\n",
"-0x worked\n", "base -16 worked\n", "Nope x\n",
"base 8 worked\n", "base +8 worked\n", "base +10 worked\n", "base 10 worked\n",
"base -8 worked\n", "base -10 worked\n", "Nope u\n",
"No switch\n",
}
pg.explore(find=0x400939, num_find=len(expected_outputs))
nose.tools.assert_equal(len(pg.found), len(expected_outputs))
# check the outputs
pipe = subprocess.PIPE
for f in pg.found:
test_input = f.state.posix.dumps(0)
test_output = f.state.posix.dumps(1)
expected_outputs.remove(test_output)
# check the output works as expected
p = subprocess.Popen(test_bin, stdout=pipe, stderr=pipe, stdin=pipe)
ret = p.communicate(test_input)[0]
nose.tools.assert_equal(ret, test_output)
# check that all of the outputs were seen
nose.tools.assert_equal(len(expected_outputs), 0)
def test_sscanf():
yield run_sscanf, None
yield run_sscanf, 8
if __name__ == "__main__":
run_sscanf(4)
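# The replay loop above is the key pattern: each symbolically solved stdin is
# fed to the real binary and the outputs must match. The same cross-check in
# miniature (illustrative; assumes a Unix /bin/cat):
#   p = subprocess.Popen(['/bin/cat'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
#   assert p.communicate('hello')[0] == 'hello'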
|
Add test case for sscanf
|
Add test case for sscanf
|
Python
|
bsd-2-clause
|
haylesr/angr,axt/angr,chubbymaggie/angr,axt/angr,schieb/angr,tyb0807/angr,angr/angr,axt/angr,iamahuman/angr,schieb/angr,angr/angr,iamahuman/angr,schieb/angr,chubbymaggie/angr,chubbymaggie/angr,tyb0807/angr,f-prettyland/angr,f-prettyland/angr,tyb0807/angr,angr/angr,haylesr/angr,f-prettyland/angr,iamahuman/angr
|
|
e92bf205247acffc353529b1a26e7da2acb8d2fb
|
giveaminute/migrations/versions/008_Fix_invalid_event_ids_in_the_needs_table.py
|
giveaminute/migrations/versions/008_Fix_invalid_event_ids_in_the_needs_table.py
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
migrate_engine.execute("""
UPDATE project_need
SET event_id=NULL
WHERE event_id=0
""")
def downgrade(migrate_engine):
# I don't really care about the downgrade for this one. Who wants invalid
# data?
pass
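# The same update, written with the expression language instead of raw SQL
# (illustrative sketch; would live inside upgrade()):
#   from sqlalchemy.sql import table, column
#   project_need = table('project_need', column('event_id'))
#   migrate_engine.execute(project_need.update()
#                          .where(project_need.c.event_id == 0)
#                          .values(event_id=None))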
|
Add a migration to fix bad event_id values
|
Add a migration to fix bad event_id values
|
Python
|
agpl-3.0
|
codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,codeforamerica/Change-By-Us,localprojects/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,localprojects/Change-By-Us,localprojects/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,localprojects/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam
|
|
ea311ee5dee288f1efd9ea2d098d0873f152d6f7
|
alembic/versions/54e898dc3251_bug_878109_shut_cami.py
|
alembic/versions/54e898dc3251_bug_878109_shut_cami.py
|
"""bug 878109 shut camino down
Revision ID: 54e898dc3251
Revises: 471c6efadde
Create Date: 2013-07-03 09:21:07.627571
"""
# revision identifiers, used by Alembic.
revision = '54e898dc3251'
down_revision = '471c6efadde'
import os
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from sqlalchemy import types
from sqlalchemy.sql import table, column
def upgrade():
    app_path = os.getcwd()
procs = [ '001_update_reports_clean.sql' ]
for myfile in [app_path + '/socorro/external/postgresql/raw_sql/procs/' + line for line in procs]:
proc = open(myfile, 'r').read()
op.execute(proc)
def downgrade():
### Nothing to do here
pass
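# Fragility worth flagging (illustrative note): os.getcwd() assumes alembic is
# invoked from the app root. A cwd-independent lookup would anchor on this file
# instead, e.g.
#   os.path.join(os.path.dirname(__file__), <relative path to raw_sql/procs>)
# -- the exact relative depth is an assumption, not taken from this repo.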
|
Add migration for camino shutdown
|
Add migration for camino shutdown
|
Python
|
mpl-2.0
|
bsmedberg/socorro,cliqz/socorro,Tchanders/socorro,yglazko/socorro,linearregression/socorro,pcabido/socorro,adngdb/socorro,KaiRo-at/socorro,AdrianGaudebert/socorro,cliqz/socorro,spthaolt/socorro,KaiRo-at/socorro,Tayamarn/socorro,pcabido/socorro,luser/socorro,Tayamarn/socorro,Tayamarn/socorro,bsmedberg/socorro,Tayamarn/socorro,AdrianGaudebert/socorro,lonnen/socorro,linearregression/socorro,pcabido/socorro,twobraids/socorro,cliqz/socorro,Tchanders/socorro,pcabido/socorro,cliqz/socorro,luser/socorro,AdrianGaudebert/socorro,pcabido/socorro,pcabido/socorro,mozilla/socorro,cliqz/socorro,luser/socorro,bsmedberg/socorro,AdrianGaudebert/socorro,KaiRo-at/socorro,mozilla/socorro,rhelmer/socorro,rhelmer/socorro,lonnen/socorro,mozilla/socorro,Tayamarn/socorro,adngdb/socorro,spthaolt/socorro,KaiRo-at/socorro,yglazko/socorro,Serg09/socorro,KaiRo-at/socorro,m8ttyB/socorro,adngdb/socorro,Serg09/socorro,adngdb/socorro,bsmedberg/socorro,mozilla/socorro,luser/socorro,Tchanders/socorro,m8ttyB/socorro,cliqz/socorro,spthaolt/socorro,m8ttyB/socorro,luser/socorro,Serg09/socorro,mozilla/socorro,rhelmer/socorro,twobraids/socorro,Tchanders/socorro,m8ttyB/socorro,twobraids/socorro,linearregression/socorro,Serg09/socorro,bsmedberg/socorro,twobraids/socorro,spthaolt/socorro,mozilla/socorro,Serg09/socorro,rhelmer/socorro,m8ttyB/socorro,KaiRo-at/socorro,rhelmer/socorro,rhelmer/socorro,spthaolt/socorro,twobraids/socorro,luser/socorro,Tchanders/socorro,yglazko/socorro,spthaolt/socorro,yglazko/socorro,AdrianGaudebert/socorro,adngdb/socorro,twobraids/socorro,linearregression/socorro,yglazko/socorro,yglazko/socorro,linearregression/socorro,Tchanders/socorro,linearregression/socorro,lonnen/socorro,m8ttyB/socorro,adngdb/socorro,lonnen/socorro,Tayamarn/socorro,Serg09/socorro,AdrianGaudebert/socorro
|
|
8c7d1bc554e6b5bbb7900a2f6d976d72795bb454
|
tools/commitstats.py
|
tools/commitstats.py
|
# Run svn log -l <some number>
import re
import numpy as np
import os
names = re.compile(r'r\d+\s[|]\s(.*)\s[|]\s200')
def get_count(filename, repo):
mystr = open(filename).read()
result = names.findall(mystr)
u = np.unique(result)
count = [(x,result.count(x),repo) for x in u]
return count
command = 'svn log -l 2300 > output.txt'
os.chdir('..')
os.system(command)
count = get_count('output.txt', 'NumPy')
os.chdir('../scipy')
os.system(command)
count.extend(get_count('output.txt', 'SciPy'))
os.chdir('../scikits')
os.system(command)
count.extend(get_count('output.txt', 'SciKits'))
count.sort()
print "** SciPy and NumPy **"
print "====================="
for val in count:
print val
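# Sanity check of the header pattern against a fabricated svn log separator
# line (illustrative):
assert names.findall("r8261 | oliphant | 2007-02-15 12:00:00") == ["oliphant"]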
|
Add a tool for determining active SVN committers.
|
Add a tool for determining active SVN committers.
|
Python
|
bsd-3-clause
|
yiakwy/numpy,GaZ3ll3/numpy,andsor/numpy,mattip/numpy,rgommers/numpy,Anwesh43/numpy,chatcannon/numpy,KaelChen/numpy,ekalosak/numpy,b-carter/numpy,jorisvandenbossche/numpy,bringingheavendown/numpy,Linkid/numpy,mortada/numpy,abalkin/numpy,pyparallel/numpy,has2k1/numpy,ESSS/numpy,tacaswell/numpy,astrofrog/numpy,naritta/numpy,solarjoe/numpy,tdsmith/numpy,stefanv/numpy,ewmoore/numpy,gmcastil/numpy,endolith/numpy,astrofrog/numpy,rgommers/numpy,ddasilva/numpy,kirillzhuravlev/numpy,grlee77/numpy,ddasilva/numpy,WillieMaddox/numpy,jonathanunderwood/numpy,yiakwy/numpy,jankoslavic/numpy,pbrod/numpy,mathdd/numpy,rajathkumarmp/numpy,SunghanKim/numpy,BMJHayward/numpy,kiwifb/numpy,bertrand-l/numpy,empeeu/numpy,ssanderson/numpy,grlee77/numpy,jorisvandenbossche/numpy,mortada/numpy,hainm/numpy,mwiebe/numpy,ajdawson/numpy,sigma-random/numpy,numpy/numpy,cjermain/numpy,dato-code/numpy,ChanderG/numpy,skymanaditya1/numpy,trankmichael/numpy,cjermain/numpy,jakirkham/numpy,NextThought/pypy-numpy,Dapid/numpy,ContinuumIO/numpy,jonathanunderwood/numpy,seberg/numpy,dch312/numpy,cjermain/numpy,dch312/numpy,kirillzhuravlev/numpy,nguyentu1602/numpy,gfyoung/numpy,numpy/numpy,MaPePeR/numpy,ChanderG/numpy,simongibbons/numpy,jakirkham/numpy,rajathkumarmp/numpy,sigma-random/numpy,grlee77/numpy,pizzathief/numpy,dimasad/numpy,brandon-rhodes/numpy,jankoslavic/numpy,sonnyhu/numpy,chiffa/numpy,bertrand-l/numpy,groutr/numpy,jorisvandenbossche/numpy,ekalosak/numpy,pizzathief/numpy,gmcastil/numpy,ContinuumIO/numpy,sinhrks/numpy,simongibbons/numpy,rhythmsosad/numpy,MSeifert04/numpy,gfyoung/numpy,anntzer/numpy,Srisai85/numpy,andsor/numpy,shoyer/numpy,stuarteberg/numpy,joferkington/numpy,bertrand-l/numpy,dato-code/numpy,jankoslavic/numpy,NextThought/pypy-numpy,ogrisel/numpy,pdebuyl/numpy,MSeifert04/numpy,charris/numpy,Yusa95/numpy,trankmichael/numpy,sigma-random/numpy,anntzer/numpy,moreati/numpy,madphysicist/numpy,ewmoore/numpy,felipebetancur/numpy,dwillmer/numpy,skymanaditya1/numpy,embray/numpy,pdebuyl/numpy,shoyer/numpy,tacaswell/numpy,Eric89GXL/numpy,cjermain/numpy,jonathanunderwood/numpy,mindw/numpy,charris/numpy,brandon-rhodes/numpy,GaZ3ll3/numpy,pelson/numpy,rudimeier/numpy,groutr/numpy,immerrr/numpy,dch312/numpy,njase/numpy,sonnyhu/numpy,BMJHayward/numpy,simongibbons/numpy,trankmichael/numpy,kiwifb/numpy,SiccarPoint/numpy,astrofrog/numpy,SunghanKim/numpy,ChanderG/numpy,numpy/numpy-refactor,pizzathief/numpy,b-carter/numpy,grlee77/numpy,musically-ut/numpy,solarjoe/numpy,dwf/numpy,ekalosak/numpy,ahaldane/numpy,Dapid/numpy,utke1/numpy,nbeaver/numpy,tynn/numpy,ESSS/numpy,mingwpy/numpy,pelson/numpy,skwbc/numpy,shoyer/numpy,KaelChen/numpy,ogrisel/numpy,GaZ3ll3/numpy,rherault-insa/numpy,trankmichael/numpy,rudimeier/numpy,matthew-brett/numpy,dch312/numpy,mhvk/numpy,jschueller/numpy,mathdd/numpy,rajathkumarmp/numpy,jorisvandenbossche/numpy,pizzathief/numpy,shoyer/numpy,rmcgibbo/numpy,mhvk/numpy,Yusa95/numpy,WarrenWeckesser/numpy,endolith/numpy,stefanv/numpy,jankoslavic/numpy,SiccarPoint/numpy,WillieMaddox/numpy,jakirkham/numpy,ContinuumIO/numpy,joferkington/numpy,Linkid/numpy,maniteja123/numpy,rhythmsosad/numpy,leifdenby/numpy,dwf/numpy,rhythmsosad/numpy,nbeaver/numpy,leifdenby/numpy,musically-ut/numpy,AustereCuriosity/numpy,dwillmer/numpy,nguyentu1602/numpy,ssanderson/numpy,njase/numpy,chatcannon/numpy,behzadnouri/numpy,Yusa95/numpy,has2k1/numpy,mindw/numpy,mhvk/numpy,mattip/numpy,moreati/numpy,naritta/numpy,jschueller/numpy,ESSS/numpy,ChristopherHogan/numpy,jorisvandenbossche/numpy,ajdawson/numpy,madphysicist/numpy,ChristopherHogan/numpy,ajd
awson/numpy,NextThought/pypy-numpy,sinhrks/numpy,pelson/numpy,tynn/numpy,joferkington/numpy,chiffa/numpy,ahaldane/numpy,matthew-brett/numpy,mhvk/numpy,felipebetancur/numpy,larsmans/numpy,rgommers/numpy,rmcgibbo/numpy,ewmoore/numpy,rherault-insa/numpy,nguyentu1602/numpy,hainm/numpy,sonnyhu/numpy,jakirkham/numpy,ewmoore/numpy,SiccarPoint/numpy,matthew-brett/numpy,AustereCuriosity/numpy,pyparallel/numpy,charris/numpy,mathdd/numpy,mindw/numpy,argriffing/numpy,SunghanKim/numpy,madphysicist/numpy,mathdd/numpy,numpy/numpy,WarrenWeckesser/numpy,naritta/numpy,GrimDerp/numpy,WarrenWeckesser/numpy,cowlicks/numpy,numpy/numpy-refactor,mortada/numpy,mhvk/numpy,mingwpy/numpy,chatcannon/numpy,BabeNovelty/numpy,immerrr/numpy,ChanderG/numpy,felipebetancur/numpy,behzadnouri/numpy,anntzer/numpy,dato-code/numpy,shoyer/numpy,stefanv/numpy,sigma-random/numpy,Eric89GXL/numpy,pbrod/numpy,hainm/numpy,naritta/numpy,MichaelAquilina/numpy,behzadnouri/numpy,immerrr/numpy,kirillzhuravlev/numpy,rherault-insa/numpy,jakirkham/numpy,embray/numpy,rajathkumarmp/numpy,BabeNovelty/numpy,ChristopherHogan/numpy,madphysicist/numpy,BabeNovelty/numpy,simongibbons/numpy,seberg/numpy,githubmlai/numpy,rudimeier/numpy,yiakwy/numpy,simongibbons/numpy,utke1/numpy,nguyentu1602/numpy,stuarteberg/numpy,musically-ut/numpy,BabeNovelty/numpy,stefanv/numpy,CMartelLML/numpy,ogrisel/numpy,dimasad/numpy,githubmlai/numpy,empeeu/numpy,rmcgibbo/numpy,mindw/numpy,MaPePeR/numpy,jschueller/numpy,matthew-brett/numpy,mattip/numpy,endolith/numpy,skymanaditya1/numpy,githubmlai/numpy,skwbc/numpy,mwiebe/numpy,AustereCuriosity/numpy,sonnyhu/numpy,Anwesh43/numpy,bmorris3/numpy,empeeu/numpy,rhythmsosad/numpy,MaPePeR/numpy,bmorris3/numpy,WarrenWeckesser/numpy,skwbc/numpy,GaZ3ll3/numpy,musically-ut/numpy,ahaldane/numpy,charris/numpy,abalkin/numpy,maniteja123/numpy,dwillmer/numpy,pbrod/numpy,Srisai85/numpy,rudimeier/numpy,mortada/numpy,Anwesh43/numpy,endolith/numpy,felipebetancur/numpy,jschueller/numpy,MSeifert04/numpy,githubmlai/numpy,maniteja123/numpy,tynn/numpy,MichaelAquilina/numpy,brandon-rhodes/numpy,cowlicks/numpy,dwf/numpy,KaelChen/numpy,drasmuss/numpy,ChristopherHogan/numpy,has2k1/numpy,sinhrks/numpy,MaPePeR/numpy,cowlicks/numpy,gfyoung/numpy,larsmans/numpy,WarrenWeckesser/numpy,ogrisel/numpy,seberg/numpy,nbeaver/numpy,tdsmith/numpy,Dapid/numpy,Yusa95/numpy,rgommers/numpy,Linkid/numpy,tdsmith/numpy,pelson/numpy,ogrisel/numpy,GrimDerp/numpy,WillieMaddox/numpy,groutr/numpy,NextThought/pypy-numpy,gmcastil/numpy,Eric89GXL/numpy,numpy/numpy-refactor,joferkington/numpy,empeeu/numpy,stefanv/numpy,bringingheavendown/numpy,CMartelLML/numpy,mwiebe/numpy,mingwpy/numpy,skymanaditya1/numpy,ewmoore/numpy,bmorris3/numpy,cowlicks/numpy,sinhrks/numpy,mingwpy/numpy,seberg/numpy,numpy/numpy,grlee77/numpy,ViralLeadership/numpy,dwf/numpy,embray/numpy,pyparallel/numpy,tdsmith/numpy,MSeifert04/numpy,tacaswell/numpy,ajdawson/numpy,GrimDerp/numpy,b-carter/numpy,leifdenby/numpy,ahaldane/numpy,KaelChen/numpy,argriffing/numpy,pizzathief/numpy,astrofrog/numpy,Linkid/numpy,dwillmer/numpy,mattip/numpy,argriffing/numpy,dimasad/numpy,ViralLeadership/numpy,astrofrog/numpy,rmcgibbo/numpy,numpy/numpy-refactor,moreati/numpy,Anwesh43/numpy,MichaelAquilina/numpy,embray/numpy,matthew-brett/numpy,brandon-rhodes/numpy,abalkin/numpy,kiwifb/numpy,njase/numpy,ViralLeadership/numpy,numpy/numpy-refactor,ssanderson/numpy,bmorris3/numpy,Srisai85/numpy,ddasilva/numpy,stuarteberg/numpy,utke1/numpy,GrimDerp/numpy,stuarteberg/numpy,pdebuyl/numpy,pelson/numpy,pbrod/numpy,CMartelLML/numpy,ekalosak/numpy,pdebuyl/
numpy,has2k1/numpy,yiakwy/numpy,larsmans/numpy,andsor/numpy,chiffa/numpy,Eric89GXL/numpy,drasmuss/numpy,pbrod/numpy,dwf/numpy,CMartelLML/numpy,larsmans/numpy,SiccarPoint/numpy,madphysicist/numpy,solarjoe/numpy,Srisai85/numpy,immerrr/numpy,bringingheavendown/numpy,hainm/numpy,BMJHayward/numpy,embray/numpy,SunghanKim/numpy,MichaelAquilina/numpy,anntzer/numpy,MSeifert04/numpy,dimasad/numpy,drasmuss/numpy,ahaldane/numpy,kirillzhuravlev/numpy,BMJHayward/numpy,dato-code/numpy,andsor/numpy
|
|
933fdbbd2f09c3ab6869dbbc6c30c698576eb3fd
|
demos/py_simple/loaders_example.py
|
demos/py_simple/loaders_example.py
|
#!/usr/bin/env python
import sys
import gfxprim.core as core
import gfxprim.loaders as loaders
import gfxprim.filters as filters
def main():
if len(sys.argv) != 2:
print("Takes an image as an argument")
sys.exit(1)
# Load Image
    img = loaders.LoadImage(sys.argv[1], None)  # argv has length 2 here, so the image path is argv[1]
# Save result
loaders.SavePNG("out.png", img, None)
if __name__ == '__main__':
main()
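# Usage, after the argv fix above (illustrative):
#   $ python loaders_example.py photo.jpg   # loads photo.jpg, writes out.png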
|
Add very simple loaders python example.
|
examples: Add very simple loaders python example.
|
Python
|
lgpl-2.1
|
gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim
|
|
4b0ed3efd88a1f6ee9ed2a3ea5bb0401cc02122d
|
creative/app/test/test_forms.py
|
creative/app/test/test_forms.py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from flask import Flask
import forms
COMPLETE_QUESTION1 = {
'question1': 'question',
'answer1a': 'answerA',
'answer1b': 'answerB',
'answer1c': 'answerC',
'answer1d': 'answerD',
'answer1anext': '2',
'answer1bnext': '2',
'answer1cnext': '2',
'answer1dnext': '2'
}
class QuestionFormTest(unittest.TestCase):
def create_form(self, data):
app = Flask(__name__)
with app.app_context():
app.config['WTF_CSRF_ENABLED'] = False
return forms.QuestionForm(data=data)
def test_question_section_is_empty_when_question_is_valid(self):
valid_form = self.create_form(COMPLETE_QUESTION1)
test_valid = forms.question_section_is_empty(valid_form, '1')
self.assertFalse(test_valid)
def test_question_section_is_empty_when_question_is_invalid(self):
invalid_question_cases = [dict(COMPLETE_QUESTION1, question1=None),
dict(COMPLETE_QUESTION1, answer1a=None),
dict(COMPLETE_QUESTION1, answer1b=None),
dict(COMPLETE_QUESTION1, answer1c=None),
dict(COMPLETE_QUESTION1, answer1d=None),
dict(COMPLETE_QUESTION1, answer1anext=None),
dict(COMPLETE_QUESTION1, answer1bnext=None),
dict(COMPLETE_QUESTION1, answer1cnext=None),
dict(COMPLETE_QUESTION1, answer1dnext=None)
]
for invalid_question in invalid_question_cases:
invalid_form = self.create_form(invalid_question)
test_invalid = forms.question_section_is_empty(invalid_form, '1')
self.assertTrue(test_invalid)
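# The invalid cases lean on the dict(base, key=None) idiom: copy the dict, then
# null exactly one field. Minimal check (illustrative):
assert dict({'a': 1, 'b': 2}, a=None) == {'a': None, 'b': 2}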
|
Add unit tests for test_question_section_is_empty
|
Add unit tests for test_question_section_is_empty
Change-Id: Ic83098dbb5527ef1430bdb8f1159802344ca1d97
|
Python
|
apache-2.0
|
google/brandometer,google/brandometer,google/brandometer
|
|
5daad1d1db64082fb0e5ec11089ee37187ab9f03
|
py/integer-break.py
|
py/integer-break.py
|
class Solution(object):
def integerBreak(self, n):
"""
:type n: int
:rtype: int
"""
        d = {1: 1, 2: 2, 3: 3, 4: 4}
if n <= 3:
return n - 1
for i in xrange(5, n + 1):
d[i] = max(d[a] * d[i - a] for a in xrange(1, i / 2 + 1))
return d[n]
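A quick, illustrative sanity check of the recurrence d[i] = max(d[a] * d[i - a]) — not part of the commit; the expected values follow from the optimal splits noted in the comments:

# Illustrative checks only (Python 2, matching the xrange/integer division above).
sol = Solution()
assert sol.integerBreak(2) == 1    # 2 = 1 + 1      -> 1 * 1 = 1
assert sol.integerBreak(4) == 4    # 4 = 2 + 2      -> 2 * 2 = 4
assert sol.integerBreak(10) == 36  # 10 = 3 + 3 + 4 -> 3 * 3 * 4 = 36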
|
Add py solution for 343. Integer Break
|
Add py solution for 343. Integer Break
343. Integer Break: https://leetcode.com/problems/integer-break/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 343. Integer Break
343. Integer Break: https://leetcode.com/problems/integer-break/
|
class Solution(object):
def integerBreak(self, n):
"""
:type n: int
:rtype: int
"""
        d = {1: 1, 2: 2, 3: 3, 4: 4}
if n <= 3:
return n - 1
for i in xrange(5, n + 1):
d[i] = max(d[a] * d[i - a] for a in xrange(1, i / 2 + 1))
return d[n]
|
<commit_before><commit_msg>Add py solution for 343. Integer Break
343. Integer Break: https://leetcode.com/problems/integer-break/<commit_after>
|
class Solution(object):
def integerBreak(self, n):
"""
:type n: int
:rtype: int
"""
        d = {1: 1, 2: 2, 3: 3, 4: 4}
if n <= 3:
return n - 1
for i in xrange(5, n + 1):
d[i] = max(d[a] * d[i - a] for a in xrange(1, i / 2 + 1))
return d[n]
|
Add py solution for 343. Integer Break
343. Integer Break: https://leetcode.com/problems/integer-break/class Solution(object):
def integerBreak(self, n):
"""
:type n: int
:rtype: int
"""
        d = {1: 1, 2: 2, 3: 3, 4: 4}
if n <= 3:
return n - 1
for i in xrange(5, n + 1):
d[i] = max(d[a] * d[i - a] for a in xrange(1, i / 2 + 1))
return d[n]
|
<commit_before><commit_msg>Add py solution for 343. Integer Break
343. Integer Break: https://leetcode.com/problems/integer-break/<commit_after>class Solution(object):
def integerBreak(self, n):
"""
:type n: int
:rtype: int
"""
        d = {1: 1, 2: 2, 3: 3, 4: 4}
if n <= 3:
return n - 1
for i in xrange(5, n + 1):
d[i] = max(d[a] * d[i - a] for a in xrange(1, i / 2 + 1))
return d[n]
|
|
9c3e98e75ae5d8819a2e88ffe4088c3f7df02f43
|
tests/test_models.py
|
tests/test_models.py
|
# -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
from django.contrib.auth import get_user_model
from django.test import TestCase
from social_django.models import UserSocialAuth
class TestSocialAuthUser(TestCase):
def test_user_relationship_none(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
with self.assertRaises(AttributeError):
user.social_user
def test_user_existing_relationship(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
with self.assertRaises(AttributeError):
user.social_user
def test_get_social_auth(self):
User = get_user_model()
user = User.objects.create_user(username="randomtester")
user_social = UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertEqual(other, user_social)
def test_get_social_auth_none(self):
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertIsNone(other)
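Running this TestCase outside the project's own test runner requires configured Django settings first; a hedged bootstrap sketch (the app list and in-memory database below are assumptions, not taken from the repository):

# Hypothetical settings bootstrap for running TestSocialAuthUser standalone.
import django
from django.conf import settings

if not settings.configured:
    settings.configure(
        DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3',
                               'NAME': ':memory:'}},
        INSTALLED_APPS=['django.contrib.contenttypes',
                        'django.contrib.auth',
                        'social_django'],
    )
    django.setup()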
|
Write a few tests for models
|
Write a few tests for models
Wrote these tests while investigating a problem I had while writing a
custom backend.
|
Python
|
bsd-3-clause
|
python-social-auth/social-app-django,python-social-auth/social-app-django,python-social-auth/social-app-django
|
Write a few tests for models
Wrote these tests while investigating a problem I had while writing a
custom backend.
|
# -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
from django.contrib.auth import get_user_model
from django.test import TestCase
from social_django.models import UserSocialAuth
class TestSocialAuthUser(TestCase):
def test_user_relationship_none(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
with self.assertRaises(AttributeError):
user.social_user
def test_user_existing_relationship(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
with self.assertRaises(AttributeError):
user.social_user
def test_get_social_auth(self):
User = get_user_model()
user = User.objects.create_user(username="randomtester")
user_social = UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertEqual(other, user_social)
def test_get_social_auth_none(self):
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertIsNone(other)
|
<commit_before><commit_msg>Write a few tests for models
Wrote these tests while investigating a problem I had while writing a
custom backend.<commit_after>
|
# -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
from django.contrib.auth import get_user_model
from django.test import TestCase
from social_django.models import UserSocialAuth
class TestSocialAuthUser(TestCase):
def test_user_relationship_none(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
with self.assertRaises(AttributeError):
user.social_user
def test_user_existing_relationship(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
with self.assertRaises(AttributeError):
user.social_user
def test_get_social_auth(self):
User = get_user_model()
user = User.objects.create_user(username="randomtester")
user_social = UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertEqual(other, user_social)
def test_get_social_auth_none(self):
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertIsNone(other)
|
Write a few tests for models
Wrote these tests while investigating a problem I had while writing a
custom backend.# -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
from django.contrib.auth import get_user_model
from django.test import TestCase
from social_django.models import UserSocialAuth
class TestSocialAuthUser(TestCase):
def test_user_relationship_none(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
with self.assertRaises(AttributeError):
user.social_user
def test_user_existing_relationship(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
with self.assertRaises(AttributeError):
user.social_user
def test_get_social_auth(self):
User = get_user_model()
user = User.objects.create_user(username="randomtester")
user_social = UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertEqual(other, user_social)
def test_get_social_auth_none(self):
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertIsNone(other)
|
<commit_before><commit_msg>Write a few tests for models
Wrote these tests while investigating a problem I had while writing a
custom backend.<commit_after># -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
from django.contrib.auth import get_user_model
from django.test import TestCase
from social_django.models import UserSocialAuth
class TestSocialAuthUser(TestCase):
def test_user_relationship_none(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
with self.assertRaises(AttributeError):
user.social_user
def test_user_existing_relationship(self):
"""Accessing User.social_user outside of the pipeline doesn't work"""
User = get_user_model()
user = User.objects.create_user(username="randomtester")
UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
with self.assertRaises(AttributeError):
user.social_user
def test_get_social_auth(self):
User = get_user_model()
user = User.objects.create_user(username="randomtester")
user_social = UserSocialAuth.objects.create(user=user, provider='my-provider', uid='1234')
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertEqual(other, user_social)
def test_get_social_auth_none(self):
other = UserSocialAuth.get_social_auth('my-provider', '1234')
self.assertIsNone(other)
|
|
e3a44a03c0aa8166bb6ce6740a82004cfab7a8ab
|
test/test_examples.py
|
test/test_examples.py
|
import os
import subprocess
import tempfile
import nbformat
class TestExamples:
def _notebook_run(self, path):
"""
Execute a notebook via nbconvert and collect output.
Returns (parsed nb object, execution errors)
"""
dirname, __ = os.path.split(path)
os.chdir(dirname)
with tempfile.NamedTemporaryFile(suffix=".ipynb") as fout:
args = ["jupyter", "nbconvert", "--to", "notebook", "--execute",
"--ExecutePreprocessor.timeout=200",
"--output", fout.name, path]
subprocess.check_call(args)
fout.seek(0)
nb = nbformat.read(fout, nbformat.current_nbformat)
errors = [output for cell in nb.cells if "outputs" in cell
for output in cell["outputs"]
if output.output_type == "error"]
return nb, errors
def test_load_era5_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'load_era5_weather_data.ipynb'))
assert errors == []
def test_pvlib_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_pvlib_model.ipynb'))
assert errors == []
def test_windpowerlib_turbine_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_windpowerlib_turbine_model.ipynb'))
assert errors == []
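For trying a single notebook by hand, a standalone sketch of the same nbconvert round-trip — the notebook path is an assumed example, not a fixture of this module:

# One-off runner mirroring _notebook_run above; returns the error outputs.
import subprocess
import tempfile

import nbformat

def run_one(path, timeout=200):
    with tempfile.NamedTemporaryFile(suffix='.ipynb') as fout:
        subprocess.check_call(
            ['jupyter', 'nbconvert', '--to', 'notebook', '--execute',
             '--ExecutePreprocessor.timeout=%d' % timeout,
             '--output', fout.name, path])
        fout.seek(0)
        nb = nbformat.read(fout, nbformat.current_nbformat)
    return [out for cell in nb.cells if 'outputs' in cell
            for out in cell['outputs'] if out.output_type == 'error']

if __name__ == '__main__':
    print(run_one('example/run_pvlib_model.ipynb'))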
|
Add tests for jupyter notebooks
|
Add tests for jupyter notebooks
|
Python
|
mit
|
oemof/feedinlib
|
Add tests for jupyter notebooks
|
import os
import subprocess
import tempfile
import nbformat
class TestExamples:
def _notebook_run(self, path):
"""
Execute a notebook via nbconvert and collect output.
Returns (parsed nb object, execution errors)
"""
dirname, __ = os.path.split(path)
os.chdir(dirname)
with tempfile.NamedTemporaryFile(suffix=".ipynb") as fout:
args = ["jupyter", "nbconvert", "--to", "notebook", "--execute",
"--ExecutePreprocessor.timeout=200",
"--output", fout.name, path]
subprocess.check_call(args)
fout.seek(0)
nb = nbformat.read(fout, nbformat.current_nbformat)
errors = [output for cell in nb.cells if "outputs" in cell
for output in cell["outputs"]
if output.output_type == "error"]
return nb, errors
def test_load_era5_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'load_era5_weather_data.ipynb'))
assert errors == []
def test_pvlib_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_pvlib_model.ipynb'))
assert errors == []
def test_windpowerlib_turbine_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_windpowerlib_turbine_model.ipynb'))
assert errors == []
|
<commit_before><commit_msg>Add tests for jupyter notebooks<commit_after>
|
import os
import subprocess
import tempfile
import nbformat
class TestExamples:
def _notebook_run(self, path):
"""
Execute a notebook via nbconvert and collect output.
Returns (parsed nb object, execution errors)
"""
dirname, __ = os.path.split(path)
os.chdir(dirname)
with tempfile.NamedTemporaryFile(suffix=".ipynb") as fout:
args = ["jupyter", "nbconvert", "--to", "notebook", "--execute",
"--ExecutePreprocessor.timeout=200",
"--output", fout.name, path]
subprocess.check_call(args)
fout.seek(0)
nb = nbformat.read(fout, nbformat.current_nbformat)
errors = [output for cell in nb.cells if "outputs" in cell
for output in cell["outputs"]
if output.output_type == "error"]
return nb, errors
def test_load_era5_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'load_era5_weather_data.ipynb'))
assert errors == []
def test_pvlib_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_pvlib_model.ipynb'))
assert errors == []
def test_windpowerlib_turbine_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_windpowerlib_turbine_model.ipynb'))
assert errors == []
|
Add tests for jupyter notebooksimport os
import subprocess
import tempfile
import nbformat
class TestExamples:
def _notebook_run(self, path):
"""
Execute a notebook via nbconvert and collect output.
Returns (parsed nb object, execution errors)
"""
dirname, __ = os.path.split(path)
os.chdir(dirname)
with tempfile.NamedTemporaryFile(suffix=".ipynb") as fout:
args = ["jupyter", "nbconvert", "--to", "notebook", "--execute",
"--ExecutePreprocessor.timeout=200",
"--output", fout.name, path]
subprocess.check_call(args)
fout.seek(0)
nb = nbformat.read(fout, nbformat.current_nbformat)
errors = [output for cell in nb.cells if "outputs" in cell
for output in cell["outputs"]
if output.output_type == "error"]
return nb, errors
def test_load_era5_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'load_era5_weather_data.ipynb'))
assert errors == []
def test_pvlib_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_pvlib_model.ipynb'))
assert errors == []
def test_windpowerlib_turbine_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_windpowerlib_turbine_model.ipynb'))
assert errors == []
|
<commit_before><commit_msg>Add tests for jupyter notebooks<commit_after>import os
import subprocess
import tempfile
import nbformat
class TestExamples:
def _notebook_run(self, path):
"""
Execute a notebook via nbconvert and collect output.
Returns (parsed nb object, execution errors)
"""
dirname, __ = os.path.split(path)
os.chdir(dirname)
with tempfile.NamedTemporaryFile(suffix=".ipynb") as fout:
args = ["jupyter", "nbconvert", "--to", "notebook", "--execute",
"--ExecutePreprocessor.timeout=200",
"--output", fout.name, path]
subprocess.check_call(args)
fout.seek(0)
nb = nbformat.read(fout, nbformat.current_nbformat)
errors = [output for cell in nb.cells if "outputs" in cell
for output in cell["outputs"]
if output.output_type == "error"]
return nb, errors
def test_load_era5_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'load_era5_weather_data.ipynb'))
assert errors == []
def test_pvlib_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_pvlib_model.ipynb'))
assert errors == []
def test_windpowerlib_turbine_ipynb(self):
parent_dirname = os.path.dirname(os.path.dirname(__file__))
nb, errors = self._notebook_run(
os.path.join(parent_dirname, 'example',
'run_windpowerlib_turbine_model.ipynb'))
assert errors == []
|
|
8a5cf2d72e3530e42d56edc97e0182a81b15fbce
|
pombola/kenya/management/commands/kenya_dump_petition_and_senate_comments.py
|
pombola/kenya/management/commands/kenya_dump_petition_and_senate_comments.py
|
import json
from optparse import make_option
import re
from django.core.management.base import NoArgsCommand
from pombola.feedback.models import Feedback
from csv import DictWriter
def unpack_comment(comment_text):
m = re.search('(?s)^({.*?}) (.*)$', comment_text)
if not m:
raise Exception(u"Found a malformed comment: " + comment_text)
return json.loads(m.group(1)), m.group(2)
class Command(NoArgsCommand):
    help = 'Generate a CSV file with all candidates for generating Google AdWords'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
print "in the command..."
        comment_keys = ('user_key', 'g', 'agroup', 'experiment_slug', 'variant', 'via')
petition_headers = comment_keys + ('name', 'email')
# Petition signatories from the first two experiments
for filename, url_path in [
('petition-1.csv', '/county-performance/petition'),
('petition-2.csv', '/county-performance-2/petition'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, petition_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['name'] = comment
row_data['email'] = f.email
writer.writerow(row_data)
senate_headers = comment_keys + ('comment',)
for filename, url_path in [
('senate-1.csv', '/county-performance/senate'),
('senate-2.csv', '/county-performance-2/senate'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, senate_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['comment'] = comment
writer.writerow(row_data)
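The comment format the command relies on — a JSON prefix, one space, then free text — can be exercised in isolation; the sample payload below is fabricated for illustration:

# Self-contained check of the parsing convention used by unpack_comment.
import json
import re

def unpack_comment(comment_text):
    # Same regex as the management command: JSON object, space, free text.
    m = re.search('(?s)^({.*?}) (.*)$', comment_text)
    if not m:
        raise Exception(u"Found a malformed comment: " + comment_text)
    return json.loads(m.group(1)), m.group(2)

data, comment = unpack_comment('{"variant": "t", "via": "web"} Great idea!')
assert data == {'variant': 't', 'via': 'web'}
assert comment == 'Great idea!'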
|
Add a command to dump petition and senate comments data
|
KE: Add a command to dump petition and senate comments data
This is needed both for analysis by MIT and to forward the comments on
to the senate.
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,hzj123/56th,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola
|
KE: Add a command to dump petition and senate comments data
This is needed both for analysis by MIT and to forward the comments on
to the senate.
|
import json
from optparse import make_option
import re
from django.core.management.base import NoArgsCommand
from pombola.feedback.models import Feedback
from csv import DictWriter
def unpack_comment(comment_text):
m = re.search('(?s)^({.*?}) (.*)$', comment_text)
if not m:
raise Exception(u"Found a malformed comment: " + comment_text)
return json.loads(m.group(1)), m.group(2)
class Command(NoArgsCommand):
    help = 'Generate a CSV file with all candidates for generating Google AdWords'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
print "in the command..."
        comment_keys = ('user_key', 'g', 'agroup', 'experiment_slug', 'variant', 'via')
petition_headers = comment_keys + ('name', 'email')
# Petition signatories from the first two experiments
for filename, url_path in [
('petition-1.csv', '/county-performance/petition'),
('petition-2.csv', '/county-performance-2/petition'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, petition_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['name'] = comment
row_data['email'] = f.email
writer.writerow(row_data)
senate_headers = comment_keys + ('comment',)
for filename, url_path in [
('senate-1.csv', '/county-performance/senate'),
('senate-2.csv', '/county-performance-2/senate'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, senate_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['comment'] = comment
writer.writerow(row_data)
|
<commit_before><commit_msg>KE: Add a command to dump petition and senate comments data
This is needed both for analysis by MIT and to forward the comments on
to the senate.<commit_after>
|
import json
from optparse import make_option
import re
from django.core.management.base import NoArgsCommand
from pombola.feedback.models import Feedback
from csv import DictWriter
def unpack_comment(comment_text):
m = re.search('(?s)^({.*?}) (.*)$', comment_text)
if not m:
raise Exception(u"Found a malformed comment: " + comment_text)
return json.loads(m.group(1)), m.group(2)
class Command(NoArgsCommand):
    help = 'Generate a CSV file with all candidates for generating Google AdWords'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
print "in the command..."
        comment_keys = ('user_key', 'g', 'agroup', 'experiment_slug', 'variant', 'via')
petition_headers = comment_keys + ('name', 'email')
# Petition signatories from the first two experiments
for filename, url_path in [
('petition-1.csv', '/county-performance/petition'),
('petition-2.csv', '/county-performance-2/petition'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, petition_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['name'] = comment
row_data['email'] = f.email
writer.writerow(row_data)
senate_headers = comment_keys + ('comment',)
for filename, url_path in [
('senate-1.csv', '/county-performance/senate'),
('senate-2.csv', '/county-performance-2/senate'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, senate_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['comment'] = comment
writer.writerow(row_data)
|
KE: Add a command to dump petition and senate comments data
This is needed both for analysis by MIT and to forward the comments on
to the senate.import json
from optparse import make_option
import re
from django.core.management.base import NoArgsCommand
from pombola.feedback.models import Feedback
from csv import DictWriter
def unpack_comment(comment_text):
m = re.search('(?s)^({.*?}) (.*)$', comment_text)
if not m:
raise Exception(u"Found a malformed comment: " + comment_text)
return json.loads(m.group(1)), m.group(2)
class Command(NoArgsCommand):
    help = 'Generate a CSV file with all candidates for generating Google AdWords'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
print "in the command..."
        comment_keys = ('user_key', 'g', 'agroup', 'experiment_slug', 'variant', 'via')
petition_headers = comment_keys + ('name', 'email')
# Petition signatories from the first two experiments
for filename, url_path in [
('petition-1.csv', '/county-performance/petition'),
('petition-2.csv', '/county-performance-2/petition'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, petition_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['name'] = comment
row_data['email'] = f.email
writer.writerow(row_data)
senate_headers = comment_keys + ('comment',)
for filename, url_path in [
('senate-1.csv', '/county-performance/senate'),
('senate-2.csv', '/county-performance-2/senate'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, senate_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['comment'] = comment
writer.writerow(row_data)
|
<commit_before><commit_msg>KE: Add a command to dump petition and senate comments data
This is needed both for analysis by MIT and to forward the comments on
to the senate.<commit_after>import json
from optparse import make_option
import re
from django.core.management.base import NoArgsCommand
from pombola.feedback.models import Feedback
from csv import DictWriter
def unpack_comment(comment_text):
m = re.search('(?s)^({.*?}) (.*)$', comment_text)
if not m:
raise Exception(u"Found a malformed comment: " + comment_text)
return json.loads(m.group(1)), m.group(2)
class Command(NoArgsCommand):
    help = 'Generate a CSV file with all candidates for generating Google AdWords'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
print "in the command..."
        comment_keys = ('user_key', 'g', 'agroup', 'experiment_slug', 'variant', 'via')
petition_headers = comment_keys + ('name', 'email')
# Petition signatories from the first two experiments
for filename, url_path in [
('petition-1.csv', '/county-performance/petition'),
('petition-2.csv', '/county-performance-2/petition'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, petition_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['name'] = comment
row_data['email'] = f.email
writer.writerow(row_data)
senate_headers = comment_keys + ('comment',)
for filename, url_path in [
('senate-1.csv', '/county-performance/senate'),
('senate-2.csv', '/county-performance-2/senate'),
]:
with open(filename, "wb") as f:
writer = DictWriter(f, senate_headers)
writer.writeheader()
for f in Feedback.objects.filter(url__endswith=url_path):
data, comment = unpack_comment(f.comment)
row_data = data.copy()
row_data['comment'] = comment
writer.writerow(row_data)
|
|
6d2c9fa6ae3d3c8b25b861b3f369f250e1ba6ed7
|
asdhash.py
|
asdhash.py
|
import asyncio
import time
import random
import string
class ASyncDHash(object):
def __init__(self):
self.nodes = [ASyncNodes('#1'), ASyncNodes('#2')]
async def read(self, key):
if key % 2 == 0:
await self.nodes[0].read(key)
else:
await self.nodes[1].read(key)
async def write(self, key, value):
if key % 2 == 0:
await self.nodes[0].write(key, value)
else:
await self.nodes[1].write(key, value)
class ASyncNodes(object):
def __init__(self, name):
self.name = name
self.storage = {}
async def read(self, key):
await asyncio.sleep(1)
if key in self.storage:
print('Read ', self.storage[key], 'with key of ', key)
return self.storage[key]
else:
print('Nothing read for key of ', key)
async def write(self, key, value):
await asyncio.sleep(2)
print('Wrote ', value, 'with key of ', key)
self.storage[key] = value
async def random_gen(dhash):
while True:
key = random.randint(0,10)
value = ''.join([random.choice(string.ascii_letters) for _ in range(10)])
if random.choice([0,1]):
await dhash.read(key)
else:
await dhash.write(key, value)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
dhash = ASyncDHash()
tasks = [
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash))
]
loop.run_until_complete(asyncio.gather(*tasks))
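On Python 3.7+ the same demo could be driven without managing the loop by hand — a hypothetical alternative entry point reusing ASyncDHash and random_gen from the script above (note it runs forever, since random_gen loops indefinitely):

# Alternative entry point for Python 3.7+; ASyncDHash and random_gen are
# the class and coroutine defined in the script above.
import asyncio

async def main(n_workers=5):
    dhash = ASyncDHash()
    await asyncio.gather(*(random_gen(dhash) for _ in range(n_workers)))

if __name__ == '__main__':
    asyncio.run(main())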
|
Add a simple example using asyncio: something that could be used to emulate physical nodes.
|
Add a simple example using asyncio: something that could be used to emulate physical nodes.
|
Python
|
mit
|
gudnm/dhash
|
Add a simple example using asyncio: something that could be used to emulate physical nodes.
|
import asyncio
import time
import random
import string
class ASyncDHash(object):
def __init__(self):
self.nodes = [ASyncNodes('#1'), ASyncNodes('#2')]
async def read(self, key):
if key % 2 == 0:
await self.nodes[0].read(key)
else:
await self.nodes[1].read(key)
async def write(self, key, value):
if key % 2 == 0:
await self.nodes[0].write(key, value)
else:
await self.nodes[1].write(key, value)
class ASyncNodes(object):
def __init__(self, name):
self.name = name
self.storage = {}
async def read(self, key):
await asyncio.sleep(1)
if key in self.storage:
print('Read ', self.storage[key], 'with key of ', key)
return self.storage[key]
else:
print('Nothing read for key of ', key)
async def write(self, key, value):
await asyncio.sleep(2)
print('Wrote ', value, 'with key of ', key)
self.storage[key] = value
async def random_gen(dhash):
while True:
key = random.randint(0,10)
value = ''.join([random.choice(string.ascii_letters) for _ in range(10)])
if random.choice([0,1]):
await dhash.read(key)
else:
await dhash.write(key, value)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
dhash = ASyncDHash()
tasks = [
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash))
]
loop.run_until_complete(asyncio.gather(*tasks))
|
<commit_before><commit_msg>Add a simple example using asyncio: something that could be used to emulate physical nodes.<commit_after>
|
import asyncio
import time
import random
import string
class ASyncDHash(object):
def __init__(self):
self.nodes = [ASyncNodes('#1'), ASyncNodes('#2')]
async def read(self, key):
if key % 2 == 0:
await self.nodes[0].read(key)
else:
await self.nodes[1].read(key)
async def write(self, key, value):
if key % 2 == 0:
await self.nodes[0].write(key, value)
else:
await self.nodes[1].write(key, value)
class ASyncNodes(object):
def __init__(self, name):
self.name = name
self.storage = {}
async def read(self, key):
await asyncio.sleep(1)
if key in self.storage:
print('Read ', self.storage[key], 'with key of ', key)
return self.storage[key]
else:
print('Nothing read for key of ', key)
async def write(self, key, value):
await asyncio.sleep(2)
print('Wrote ', value, 'with key of ', key)
self.storage[key] = value
async def random_gen(dhash):
while True:
key = random.randint(0,10)
value = ''.join([random.choice(string.ascii_letters) for _ in range(10)])
if random.choice([0,1]):
await dhash.read(key)
else:
await dhash.write(key, value)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
dhash = ASyncDHash()
tasks = [
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash))
]
loop.run_until_complete(asyncio.gather(*tasks))
|
Add a simple example using asyncio: something that could be used to emulate physical nodes.import asyncio
import time
import random
import string
class ASyncDHash(object):
def __init__(self):
self.nodes = [ASyncNodes('#1'), ASyncNodes('#2')]
async def read(self, key):
if key % 2 == 0:
await self.nodes[0].read(key)
else:
await self.nodes[1].read(key)
async def write(self, key, value):
if key % 2 == 0:
await self.nodes[0].write(key, value)
else:
await self.nodes[1].write(key, value)
class ASyncNodes(object):
def __init__(self, name):
self.name = name
self.storage = {}
async def read(self, key):
await asyncio.sleep(1)
if key in self.storage:
print('Read ', self.storage[key], 'with key of ', key)
return self.storage[key]
else:
print('Nothing read for key of ', key)
async def write(self, key, value):
await asyncio.sleep(2)
print('Wrote ', value, 'with key of ', key)
self.storage[key] = value
async def random_gen(dhash):
while True:
key = random.randint(0,10)
value = ''.join([random.choice(string.ascii_letters) for _ in range(10)])
if random.choice([0,1]):
await dhash.read(key)
else:
await dhash.write(key, value)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
dhash = ASyncDHash()
tasks = [
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash))
]
loop.run_until_complete(asyncio.gather(*tasks))
|
<commit_before><commit_msg>Add a simple example using asyncio: something that could be used to emulate physical nodes.<commit_after>import asyncio
import time
import random
import string
class ASyncDHash(object):
def __init__(self):
self.nodes = [ASyncNodes('#1'), ASyncNodes('#2')]
async def read(self, key):
if key % 2 == 0:
await self.nodes[0].read(key)
else:
await self.nodes[1].read(key)
async def write(self, key, value):
if key % 2 == 0:
await self.nodes[0].write(key, value)
else:
await self.nodes[1].write(key, value)
class ASyncNodes(object):
def __init__(self, name):
self.name = name
self.storage = {}
async def read(self, key):
await asyncio.sleep(1)
if key in self.storage:
print('Read ', self.storage[key], 'with key of ', key)
return self.storage[key]
else:
print('Nothing read for key of ', key)
async def write(self, key, value):
await asyncio.sleep(2)
print('Wrote ', value, 'with key of ', key)
self.storage[key] = value
async def random_gen(dhash):
while True:
key = random.randint(0,10)
value = ''.join([random.choice(string.ascii_letters) for _ in range(10)])
if random.choice([0,1]):
await dhash.read(key)
else:
await dhash.write(key, value)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
dhash = ASyncDHash()
tasks = [
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash)),
loop.create_task(random_gen(dhash))
]
loop.run_until_complete(asyncio.gather(*tasks))
|
|
bec7e3c3e479216d419281af4bf52151101fb6a9
|
webkit/tools/layout_tests/PRESUBMIT.py
|
webkit/tools/layout_tests/PRESUBMIT.py
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""test_expectations.txt presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
TEST_EXPECTATIONS = 'test_expectations.txt'
def LintTestFiles(input_api, output_api):
current_dir = input_api.PresubmitLocalPath()
# Set 'webkit/tools/layout_tests' in include path.
python_paths = [
current_dir,
input_api.os_path.join(current_dir, '..', '..', '..', 'tools', 'python')
]
env = input_api.environ.copy()
if env.get('PYTHONPATH'):
python_paths.append(env['PYTHONPATH'])
env['PYTHONPATH'] = input_api.os_path.pathsep.join(python_paths)
args = [
input_api.python_executable,
input_api.os_path.join(current_dir, 'run_webkit_tests.py'),
'--lint-test-files'
]
subproc = input_api.subprocess.Popen(
args,
cwd=current_dir,
env=env,
stdin=input_api.subprocess.PIPE,
stdout=input_api.subprocess.PIPE,
stderr=input_api.subprocess.STDOUT)
stdout_data = subproc.communicate()[0]
# TODO(ukai): consolidate run_webkit_tests --lint-test-files reports.
is_error = lambda line: (input_api.re.match('^Line:', line) or
input_api.re.search('ERROR Line:', line))
error = filter(is_error, stdout_data.splitlines())
if error:
return [output_api.PresubmitError('Lint error\n%s' % '\n'.join(error),
long_text=stdout_data)]
return []
def LintTestExpectations(input_api, output_api):
for path in input_api.LocalPaths():
if TEST_EXPECTATIONS == input_api.os_path.basename(path):
return LintTestFiles(input_api, output_api)
return []
def CheckChangeOnUpload(input_api, output_api):
return LintTestExpectations(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return LintTestExpectations(input_api, output_api)
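The error-matching filter can be sanity-checked on its own; the sample lint lines below are fabricated, not real run_webkit_tests output:

# Standalone illustration of the is_error matching used above.
import re

is_error = lambda line: (re.match('^Line:', line) or
                         re.search('ERROR Line:', line))

sample = ['Checking test_expectations.txt',
          'Line:12 duplicate expectation for fast/foo.html',
          'WARNING Line:40 unknown modifier',
          'tests ERROR Line:77 missing colon']
print(list(filter(is_error, sample)))
# keeps only the 'Line:12 ...' and '... ERROR Line:77 ...' entries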
|
Add presubmit check to lint test files.
|
Add presubmit check to lint test files.
BUG=5339
TEST=report error by "gcl presubmit" with bad test_expectations.txt.
Review URL: http://codereview.chromium.org/160442
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@22456 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
chuan9/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,rogerwang/chromium,fujunwei/chromium-crosswalk,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,Jonekee/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,Just-D/chromium-1,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,patrickm/chromium.src,zcbenz/cefode-chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,keishi/chromium,mogoweb/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,timopulkkinen/BubbleFish,dushu1203/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Just-D/chromium-1,zcbenz/cefode-chromium,keishi/chromium,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,patrickm/chromium.src,rogerwang/chromium,littlstar/chromium.src,Fireblend/chromium-crosswalk,keishi/chromium,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,ondra-novak/chromium.src,ondra-novak/chromium.src,keishi/chromium,markYoungH/chromium.src,robclark/chromium,keishi/chromium,Chilledheart/chromium,bright-sparks/chromium-spacewalk,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,keishi/chromium,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,Fireblend/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,anirudhSK/chromium,M4sse/chromium.src,anirudhSK/chromium,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,dushu1203/chromium.src,keishi/chromium,robclark/chromium,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,anirudhSK/chromium,hujiajie/pa-chromium,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,robclark/chromium,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,Jonekee/chromium.src,Jonekee/chromium.src,zcbenz/cefode-chromium,patrickm/chromium.src,krieger-od/nwjs_chromium.src,patrickm/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,patrickm/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,jaruba/chromium.src,ltilve/chromium,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,Fireblend/chromium-crosswalk,rogerwang/chromium,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_c
hromium.src,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,keishi/chromium,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,rogerwang/chromium,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,robclark/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,keishi/chromium,M4sse/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,nacl-webkit/chrome_deps,bright-sparks/chromium-spacewalk,rogerwang/chromium,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,rogerwang/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,littlstar/chromium.src,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,M4sse/chromium.src,mogoweb/chromium-crosswalk,rogerwang/chromium,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,hujiajie/pa-chromium,rogerwang/chromium,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,zcbenz/cefode-chromium,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,dednal/chromium.src,Fireblend/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,jaruba/chromium.src,anirudhSK/chromium,fujunwei/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,robclark/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,jaruba/chromium.src,robclark/chromium,timopulkkinen/BubbleFish,ltilve/chromium,nacl-webkit/chrome_deps,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,robclark/chromium,dushu1203/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,Just-D/chromium-1,markYoungH/chromium.src,zcbenz/cefode-chromium,ondra-novak/chromium.src,chuan9/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,Just-D/chromium-1,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromiu
m-crosswalk-efl,TheTypoMaster/chromium-crosswalk,ltilve/chromium,littlstar/chromium.src,Just-D/chromium-1,fujunwei/chromium-crosswalk,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,M4sse/chromium.src,rogerwang/chromium,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,Chilledheart/chromium,ChromiumWebApps/chromium,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,zcbenz/cefode-chromium,dushu1203/chromium.src,zcbenz/cefode-chromium,robclark/chromium,Just-D/chromium-1,dushu1203/chromium.src,robclark/chromium,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,keishi/chromium,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,robclark/chromium,anirudhSK/chromium,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,keishi/chromium,anirudhSK/chromium,Chilledheart/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,littlstar/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,zcbenz/cefode-chromium,anirudhSK/chromium,dednal/chromium.src,Chilledheart/chromium,ltilve/chromium,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Just-D/chromium-1,hgl888/chromium-crosswalk,ltilve/chromium,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,anirudhSK/chromium,Just-D/chromium-1,jaruba/chromium.src,M4sse/chromium.src,hujiajie/pa-chromium,littlstar/chromium.src,Chilledheart/chromium
|
Add presubmit check to lint test files.
BUG=5339
TEST=report error by "gcl presubmit" with bad test_expectations.txt.
Review URL: http://codereview.chromium.org/160442
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@22456 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""test_expectations.txt presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
TEST_EXPECTATIONS = 'test_expectations.txt'
def LintTestFiles(input_api, output_api):
current_dir = input_api.PresubmitLocalPath()
# Set 'webkit/tools/layout_tests' in include path.
python_paths = [
current_dir,
input_api.os_path.join(current_dir, '..', '..', '..', 'tools', 'python')
]
env = input_api.environ.copy()
if env.get('PYTHONPATH'):
python_paths.append(env['PYTHONPATH'])
env['PYTHONPATH'] = input_api.os_path.pathsep.join(python_paths)
args = [
input_api.python_executable,
input_api.os_path.join(current_dir, 'run_webkit_tests.py'),
'--lint-test-files'
]
subproc = input_api.subprocess.Popen(
args,
cwd=current_dir,
env=env,
stdin=input_api.subprocess.PIPE,
stdout=input_api.subprocess.PIPE,
stderr=input_api.subprocess.STDOUT)
stdout_data = subproc.communicate()[0]
# TODO(ukai): consolidate run_webkit_tests --lint-test-files reports.
is_error = lambda line: (input_api.re.match('^Line:', line) or
input_api.re.search('ERROR Line:', line))
error = filter(is_error, stdout_data.splitlines())
if error:
return [output_api.PresubmitError('Lint error\n%s' % '\n'.join(error),
long_text=stdout_data)]
return []
def LintTestExpectations(input_api, output_api):
for path in input_api.LocalPaths():
if TEST_EXPECTATIONS == input_api.os_path.basename(path):
return LintTestFiles(input_api, output_api)
return []
def CheckChangeOnUpload(input_api, output_api):
return LintTestExpectations(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return LintTestExpectations(input_api, output_api)
|
<commit_before><commit_msg>Add presubmit check to lint test files.
BUG=5339
TEST=report error by "gcl presubmit" with bad test_expectations.txt.
Review URL: http://codereview.chromium.org/160442
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@22456 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""test_expectations.txt presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
TEST_EXPECTATIONS = 'test_expectations.txt'
def LintTestFiles(input_api, output_api):
current_dir = input_api.PresubmitLocalPath()
# Set 'webkit/tools/layout_tests' in include path.
python_paths = [
current_dir,
input_api.os_path.join(current_dir, '..', '..', '..', 'tools', 'python')
]
env = input_api.environ.copy()
if env.get('PYTHONPATH'):
python_paths.append(env['PYTHONPATH'])
env['PYTHONPATH'] = input_api.os_path.pathsep.join(python_paths)
args = [
input_api.python_executable,
input_api.os_path.join(current_dir, 'run_webkit_tests.py'),
'--lint-test-files'
]
subproc = input_api.subprocess.Popen(
args,
cwd=current_dir,
env=env,
stdin=input_api.subprocess.PIPE,
stdout=input_api.subprocess.PIPE,
stderr=input_api.subprocess.STDOUT)
stdout_data = subproc.communicate()[0]
# TODO(ukai): consolidate run_webkit_tests --lint-test-files reports.
is_error = lambda line: (input_api.re.match('^Line:', line) or
input_api.re.search('ERROR Line:', line))
error = filter(is_error, stdout_data.splitlines())
if error:
return [output_api.PresubmitError('Lint error\n%s' % '\n'.join(error),
long_text=stdout_data)]
return []
def LintTestExpectations(input_api, output_api):
for path in input_api.LocalPaths():
if TEST_EXPECTATIONS == input_api.os_path.basename(path):
return LintTestFiles(input_api, output_api)
return []
def CheckChangeOnUpload(input_api, output_api):
return LintTestExpectations(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return LintTestExpectations(input_api, output_api)
|
Add presubmit check to lint test files.
BUG=5339
TEST=report error by "gcl presubmit" with bad test_expectations.txt.
Review URL: http://codereview.chromium.org/160442
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@22456 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""test_expectations.txt presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
TEST_EXPECTATIONS = 'test_expectations.txt'
def LintTestFiles(input_api, output_api):
current_dir = input_api.PresubmitLocalPath()
# Set 'webkit/tools/layout_tests' in include path.
python_paths = [
current_dir,
input_api.os_path.join(current_dir, '..', '..', '..', 'tools', 'python')
]
env = input_api.environ.copy()
if env.get('PYTHONPATH'):
python_paths.append(env['PYTHONPATH'])
env['PYTHONPATH'] = input_api.os_path.pathsep.join(python_paths)
args = [
input_api.python_executable,
input_api.os_path.join(current_dir, 'run_webkit_tests.py'),
'--lint-test-files'
]
subproc = input_api.subprocess.Popen(
args,
cwd=current_dir,
env=env,
stdin=input_api.subprocess.PIPE,
stdout=input_api.subprocess.PIPE,
stderr=input_api.subprocess.STDOUT)
stdout_data = subproc.communicate()[0]
# TODO(ukai): consolidate run_webkit_tests --lint-test-files reports.
is_error = lambda line: (input_api.re.match('^Line:', line) or
input_api.re.search('ERROR Line:', line))
error = filter(is_error, stdout_data.splitlines())
if error:
return [output_api.PresubmitError('Lint error\n%s' % '\n'.join(error),
long_text=stdout_data)]
return []
def LintTestExpectations(input_api, output_api):
for path in input_api.LocalPaths():
if TEST_EXPECTATIONS == input_api.os_path.basename(path):
return LintTestFiles(input_api, output_api)
return []
def CheckChangeOnUpload(input_api, output_api):
return LintTestExpectations(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return LintTestExpectations(input_api, output_api)
|
<commit_before><commit_msg>Add presubmit check to lint test files.
BUG=5339
TEST=report error by "gcl presubmit" with bad test_expectations.txt.
Review URL: http://codereview.chromium.org/160442
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@22456 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""test_expectations.txt presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
TEST_EXPECTATIONS = 'test_expectations.txt'
def LintTestFiles(input_api, output_api):
current_dir = input_api.PresubmitLocalPath()
# Set 'webkit/tools/layout_tests' in include path.
python_paths = [
current_dir,
input_api.os_path.join(current_dir, '..', '..', '..', 'tools', 'python')
]
env = input_api.environ.copy()
if env.get('PYTHONPATH'):
python_paths.append(env['PYTHONPATH'])
env['PYTHONPATH'] = input_api.os_path.pathsep.join(python_paths)
args = [
input_api.python_executable,
input_api.os_path.join(current_dir, 'run_webkit_tests.py'),
'--lint-test-files'
]
subproc = input_api.subprocess.Popen(
args,
cwd=current_dir,
env=env,
stdin=input_api.subprocess.PIPE,
stdout=input_api.subprocess.PIPE,
stderr=input_api.subprocess.STDOUT)
stdout_data = subproc.communicate()[0]
# TODO(ukai): consolidate run_webkit_tests --lint-test-files reports.
is_error = lambda line: (input_api.re.match('^Line:', line) or
input_api.re.search('ERROR Line:', line))
error = filter(is_error, stdout_data.splitlines())
if error:
return [output_api.PresubmitError('Lint error\n%s' % '\n'.join(error),
long_text=stdout_data)]
return []
def LintTestExpectations(input_api, output_api):
for path in input_api.LocalPaths():
if TEST_EXPECTATIONS == input_api.os_path.basename(path):
return LintTestFiles(input_api, output_api)
return []
def CheckChangeOnUpload(input_api, output_api):
return LintTestExpectations(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return LintTestExpectations(input_api, output_api)
|
|
195a806d130a20b91ac1c92a39b6de80565c4b67
|
all_f_g.py
|
all_f_g.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 03 10:28:37 2015
@author: Jens von der Linden
The global lists all_f and all_g record the values of f, g, and r each time
f_func or g_func is called, as [r, value] pairs.
To extract the f values, convert all_f to a numpy array and take all_f[:, 1];
the corresponding r values are all_f[:, 0].
Used for debugging.
"""
all_f = []
all_g = []
all_g_term1 = []
all_g_term2 = []
all_g_term3 = []
all_g_term4 = []
all_g_term5 = []
all_g_term6 = []
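As the docstring notes, the samples convert naturally to a numpy array once populated — a sketch assuming at least one caller has appended [r, f] pairs to all_f:

# Illustration only: reading back the collected samples after a run.
import numpy as np
import all_f_g

arr = np.array(all_f_g.all_f)  # shape (n_calls, 2) once non-empty
rs, fs = arr[:, 0], arr[:, 1]  # radii and f values, in call order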
|
Add module for global debug variables.
|
Add module for global debug variables.
|
Python
|
mit
|
jensv/fluxtubestability,jensv/fluxtubestability
|
Add module for global debug variables.
|
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 03 10:28:37 2015
@author: Jens von der Linden
The global lists all_f and all_g record the values of f, g, and r each time
f_func or g_func is called, as [r, value] pairs.
To extract the f values, convert all_f to a numpy array and take all_f[:, 1];
the corresponding r values are all_f[:, 0].
Used for debugging.
"""
all_f = []
all_g = []
all_g_term1 = []
all_g_term2 = []
all_g_term3 = []
all_g_term4 = []
all_g_term5 = []
all_g_term6 = []
|
<commit_before><commit_msg>Add module for global debug variables.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 03 10:28:37 2015
@author: Jens von der Linden
global variables all_f and all_g save value of f,g and r each time an f_func, or
g_func is called.
In the form [r, f].
To call fs convert to numpy array and all_f[:, 1] for rs all_f[:, 0].
Used for debugging.
"""
all_f = []
all_g = []
all_g_term1 = []
all_g_term2 = []
all_g_term3 = []
all_g_term4 = []
all_g_term5 = []
all_g_term6 = []
|
Add module for global debug variables.# -*- coding: utf-8 -*-
"""
Created on Tue Feb 03 10:28:37 2015
@author: Jens von der Linden
global variables all_f and all_g save value of f,g and r each time an f_func, or
g_func is called.
In the form [r, f].
To call fs convert to numpy array and all_f[:, 1] for rs all_f[:, 0].
Used for debugging.
"""
all_f = []
all_g = []
all_g_term1 = []
all_g_term2 = []
all_g_term3 = []
all_g_term4 = []
all_g_term5 = []
all_g_term6 = []
|
<commit_before><commit_msg>Add module for global debug variables.<commit_after># -*- coding: utf-8 -*-
"""
Created on Tue Feb 03 10:28:37 2015
@author: Jens von der Linden
global variables all_f and all_g save value of f,g and r each time an f_func, or
g_func is called.
In the form [r, f].
To call fs convert to numpy array and all_f[:, 1] for rs all_f[:, 0].
Used for debugging.
"""
all_f = []
all_g = []
all_g_term1 = []
all_g_term2 = []
all_g_term3 = []
all_g_term4 = []
all_g_term5 = []
all_g_term6 = []
|
|
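As the module docstring in the record above says, the accumulated [r, value] pairs are easiest to inspect as a NumPy array. A minimal sketch, assuming a solver run has already populated all_f:

    import numpy as np
    import all_f_g  # the debug module above

    arr = np.array(all_f_g.all_f)   # shape (n_calls, 2): column 0 is r, column 1 is f
    rs, fs = arr[:, 0], arr[:, 1]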
e5798a7cca03a733c5bef2cdba735ea5a60b1424
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
Add fabric.py helper script back
|
Add fabric.py helper script back
|
Python
|
mit
|
istresearch/traptor,istresearch/traptor
|
Add fabric.py helper script back
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
<commit_before><commit_msg>Add fabric.py helper script back<commit_after>
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
Add fabric.py helper script backfrom fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
<commit_before><commit_msg>Add fabric.py helper script back<commit_after>from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
|
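One genuine bug in the fabfile above: publish() calls deploy() with no argument, but deploy(pip_repo) requires the repository path, so `fab publish` raises a TypeError. A minimal fix sketch, assuming the path should be threaded through publish (the default shown is hypothetical):

    def publish(pip_repo='pip-repo'):  # hypothetical default; override with `fab publish:pip_repo=/path/to/repo`
        prep()
        package()
        deploy(pip_repo)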
56e2088505d9c70f1e6df204eec4f05d05de3f93
|
tests/test_driller.py
|
tests/test_driller.py
|
import nose
import driller
import logging
l = logging.getLogger("driller.tests.test_driller")
import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))
def test_drilling_cgc():
'''
test drilling on the cgc binary, palindrome.
'''
binary = "cgc_scored_event_1/cgc/0b32aa01_01"
# fuzzbitmap says every transition is worth satisfying
d = driller.Driller(os.path.join(bin_location, binary), "AAAA", "\xff"*65535, "whatever~")
new_inputs = d.drill()
nose.tools.assert_equal(len(new_inputs), 7)
# make sure driller produced a new input which hits the easter egg
nose.tools.assert_true(any(filter(lambda x: x[1].startswith('^'), new_inputs)))
def run_all():
functions = globals()
all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
run_all()
|
Add a simple test case to driller
|
Add a simple test case to driller
|
Python
|
bsd-2-clause
|
shellphish/driller
|
Add a simple test case to driller
|
import nose
import driller
import logging
l = logging.getLogger("driller.tests.test_driller")
import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))
def test_drilling_cgc():
'''
test drilling on the cgc binary, palindrome.
'''
binary = "cgc_scored_event_1/cgc/0b32aa01_01"
# fuzzbitmap says every transition is worth satisfying
d = driller.Driller(os.path.join(bin_location, binary), "AAAA", "\xff"*65535, "whatever~")
new_inputs = d.drill()
nose.tools.assert_equal(len(new_inputs), 7)
# make sure driller produced a new input which hits the easter egg
nose.tools.assert_true(any(filter(lambda x: x[1].startswith('^'), new_inputs)))
def run_all():
functions = globals()
all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
run_all()
|
<commit_before><commit_msg>Add a simple test case to driller<commit_after>
|
import nose
import driller
import logging
l = logging.getLogger("driller.tests.test_driller")
import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))
def test_drilling_cgc():
'''
test drilling on the cgc binary, palindrome.
'''
binary = "cgc_scored_event_1/cgc/0b32aa01_01"
# fuzzbitmap says every transition is worth satisfying
d = driller.Driller(os.path.join(bin_location, binary), "AAAA", "\xff"*65535, "whatever~")
new_inputs = d.drill()
nose.tools.assert_equal(len(new_inputs), 7)
# make sure driller produced a new input which hits the easter egg
nose.tools.assert_true(any(filter(lambda x: x[1].startswith('^'), new_inputs)))
def run_all():
functions = globals()
all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
run_all()
|
Add a simple test case to drillerimport nose
import driller
import logging
l = logging.getLogger("driller.tests.test_driller")
import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))
def test_drilling_cgc():
'''
test drilling on the cgc binary, palindrome.
'''
binary = "cgc_scored_event_1/cgc/0b32aa01_01"
# fuzzbitmap says every transition is worth satisfying
d = driller.Driller(os.path.join(bin_location, binary), "AAAA", "\xff"*65535, "whatever~")
new_inputs = d.drill()
nose.tools.assert_equal(len(new_inputs), 7)
# make sure driller produced a new input which hits the easter egg
nose.tools.assert_true(any(filter(lambda x: x[1].startswith('^'), new_inputs)))
def run_all():
functions = globals()
all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
run_all()
|
<commit_before><commit_msg>Add a simple test case to driller<commit_after>import nose
import driller
import logging
l = logging.getLogger("driller.tests.test_driller")
import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))
def test_drilling_cgc():
'''
test drilling on the cgc binary, palindrome.
'''
binary = "cgc_scored_event_1/cgc/0b32aa01_01"
# fuzzbitmap says every transition is worth satisfying
d = driller.Driller(os.path.join(bin_location, binary), "AAAA", "\xff"*65535, "whatever~")
new_inputs = d.drill()
nose.tools.assert_equal(len(new_inputs), 7)
# make sure driller produced a new input which hits the easter egg
nose.tools.assert_true(any(filter(lambda x: x[1].startswith('^'), new_inputs)))
def run_all():
functions = globals()
all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
run_all()
|
|
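The run_all() helper above relies on Python 2 tuple parameter unpacking (`lambda (k, v): ...`), which is a syntax error on Python 3. An equivalent Python 3 sketch of the same collection step:

    all_functions = {k: v for k, v in globals().items() if k.startswith('test_')}
    for name in sorted(all_functions):
        if callable(all_functions[name]):
            all_functions[name]()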
f3169011a9225161d004d4318aaf1375c109b0d6
|
tools/lcp_from_csv.py
|
tools/lcp_from_csv.py
|
#!./venv/bin/python
import argparse
import csv
import datetime
import sys
import re
from collections import namedtuple
from blag import create_app, db
from blag.models import HikeDestination, Hike
DATE_FORMAT = '%d.%m.%Y'
COORDINATE_FORMAT = re.compile(r'^([0-9.-]+),\s*([0-9.-]+)$')
METHOD_MAP = {
'fots': 'foot',
'stegjern': 'crampons',
}
PeakTuple = namedtuple('PeakTuple', 'name method coordinates date notes')
def main():
args = get_args()
peaks = parse_csv(args.file)
insert_to_database(peaks, args.database)
def parse_csv(input_file):
'''Assumes fields are name,method,coordinate,date,notes'''
parsed = []
with open(input_file) as fh:
csv_reader = csv.reader(fh)
for row_num, row in enumerate(csv_reader):
if row_num == 0:
# Skip the header line
continue
assert len(row) == 5
peak = PeakTuple(*(elem.strip() for elem in row))
parsed.append(peak)
return parsed
def insert_to_database(peaks, database):
app = create_app(SQLALCHEMY_DATABASE_URI=database)
with app.app_context():
for peak in peaks:
if not peak.coordinates:
sys.stderr.write('Skipping %s due to missing coordinates\n' % peak.name)
continue
hike_destination = HikeDestination(
name=peak.name,
high_point_coord=point_from_coordinates(peak.coordinates),
altitude=0,
is_summit=True,
)
db.session.add(hike_destination)
if not peak.method:
sys.stderr.write('Skipping hike for %s since method is missing\n' % peak.name)
continue
hike = Hike(
destination=hike_destination,
method=METHOD_MAP.get(peak.method, peak.method),
date=datetime.datetime.strptime(peak.date, DATE_FORMAT) if peak.date else None,
)
db.session.add(hike)
db.session.commit()
def point_from_coordinates(coord):
'''Transform a "float, float" string to POINT(float float)'''
match = COORDINATE_FORMAT.match(coord)
assert match, '%s didnt match coordinate format' % coord
parsed_coords = (match.group(1), match.group(2))
return 'POINT(%s %s)' % parsed_coords
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('file', help='File to read the LCP from')
parser.add_argument('-d', '--database', help='Address to the database to insert into',
default='postgres://vagrant:vagrant@10.20.30.50/vagrant')
return parser.parse_args()
if __name__ == '__main__':
main()
|
Add script to load peaks from csv
|
Add script to load peaks from csv
|
Python
|
mit
|
thusoy/blag,thusoy/blag,thusoy/blag,thusoy/blag
|
Add script to load peaks from csv
|
#!./venv/bin/python
import argparse
import csv
import datetime
import sys
import re
from collections import namedtuple
from blag import create_app, db
from blag.models import HikeDestination, Hike
DATE_FORMAT = '%d.%m.%Y'
COORDINATE_FORMAT = re.compile(r'^([0-9.-]+),\s*([0-9.-]+)$')
METHOD_MAP = {
'fots': 'foot',
'stegjern': 'crampons',
}
PeakTuple = namedtuple('PeakTuple', 'name method coordinates date notes')
def main():
args = get_args()
peaks = parse_csv(args.file)
insert_to_database(peaks, args.database)
def parse_csv(input_file):
'''Assumes fields are name,method,coordinate,date,notes'''
parsed = []
with open(input_file) as fh:
csv_reader = csv.reader(fh)
for row_num, row in enumerate(csv_reader):
if row_num == 0:
# Skip the header line
continue
assert len(row) == 5
peak = PeakTuple(*(elem.strip() for elem in row))
parsed.append(peak)
return parsed
def insert_to_database(peaks, database):
app = create_app(SQLALCHEMY_DATABASE_URI=database)
with app.app_context():
for peak in peaks:
if not peak.coordinates:
sys.stderr.write('Skipping %s due to missing coordinates\n' % peak.name)
continue
hike_destination = HikeDestination(
name=peak.name,
high_point_coord=point_from_coordinates(peak.coordinates),
altitude=0,
is_summit=True,
)
db.session.add(hike_destination)
if not peak.method:
sys.stderr.write('Skipping hike for %s since method is missing\n' % peak.name)
continue
hike = Hike(
destination=hike_destination,
method=METHOD_MAP.get(peak.method, peak.method),
date=datetime.datetime.strptime(peak.date, DATE_FORMAT) if peak.date else None,
)
db.session.add(hike)
db.session.commit()
def point_from_coordinates(coord):
'''Transform a "float, float" string to POINT(float float)'''
match = COORDINATE_FORMAT.match(coord)
assert match, '%s didnt match coordinate format' % coord
parsed_coords = (match.group(1), match.group(2))
return 'POINT(%s %s)' % parsed_coords
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('file', help='File to read the LCP from')
parser.add_argument('-d', '--database', help='Address to the database to insert into',
default='postgres://vagrant:vagrant@10.20.30.50/vagrant')
return parser.parse_args()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to load peaks from csv<commit_after>
|
#!./venv/bin/python
import argparse
import csv
import datetime
import sys
import re
from collections import namedtuple
from blag import create_app, db
from blag.models import HikeDestination, Hike
DATE_FORMAT = '%d.%m.%Y'
COORDINATE_FORMAT = re.compile(r'^([0-9.-]+),\s*([0-9.-]+)$')
METHOD_MAP = {
'fots': 'foot',
'stegjern': 'crampons',
}
PeakTuple = namedtuple('PeakTuple', 'name method coordinates date notes')
def main():
args = get_args()
peaks = parse_csv(args.file)
insert_to_database(peaks, args.database)
def parse_csv(input_file):
'''Assumes fields are name,method,coordinate,date,notes'''
parsed = []
with open(input_file) as fh:
csv_reader = csv.reader(fh)
for row_num, row in enumerate(csv_reader):
if row_num == 0:
# Skip the header line
continue
assert len(row) == 5
peak = PeakTuple(*(elem.strip() for elem in row))
parsed.append(peak)
return parsed
def insert_to_database(peaks, database):
app = create_app(SQLALCHEMY_DATABASE_URI=database)
with app.app_context():
for peak in peaks:
if not peak.coordinates:
sys.stderr.write('Skipping %s due to missing coordinates\n' % peak.name)
continue
hike_destination = HikeDestination(
name=peak.name,
high_point_coord=point_from_coordinates(peak.coordinates),
altitude=0,
is_summit=True,
)
db.session.add(hike_destination)
if not peak.method:
sys.stderr.write('Skipping hike for %s since method is missing\n' % peak.name)
continue
hike = Hike(
destination=hike_destination,
method=METHOD_MAP.get(peak.method, peak.method),
date=datetime.datetime.strptime(peak.date, DATE_FORMAT) if peak.date else None,
)
db.session.add(hike)
db.session.commit()
def point_from_coordinates(coord):
'''Transform a "float, float" string to POINT(float float)'''
match = COORDINATE_FORMAT.match(coord)
assert match, '%s didnt match coordinate format' % coord
parsed_coords = (match.group(1), match.group(2))
return 'POINT(%s %s)' % parsed_coords
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('file', help='File to read the LCP from')
parser.add_argument('-d', '--database', help='Address to the database to insert into',
default='postgres://vagrant:vagrant@10.20.30.50/vagrant')
return parser.parse_args()
if __name__ == '__main__':
main()
|
Add script to load peaks from csv#!./venv/bin/python
import argparse
import csv
import datetime
import sys
import re
from collections import namedtuple
from blag import create_app, db
from blag.models import HikeDestination, Hike
DATE_FORMAT = '%d.%m.%Y'
COORDINATE_FORMAT = re.compile(r'^([0-9.-]+),\s*([0-9.-]+)$')
METHOD_MAP = {
'fots': 'foot',
'stegjern': 'crampons',
}
PeakTuple = namedtuple('PeakTuple', 'name method coordinates date notes')
def main():
args = get_args()
peaks = parse_csv(args.file)
insert_to_database(peaks, args.database)
def parse_csv(input_file):
'''Assumes fields are name,method,coordinate,date,notes'''
parsed = []
with open(input_file) as fh:
csv_reader = csv.reader(fh)
for row_num, row in enumerate(csv_reader):
if row_num == 0:
# Skip the header line
continue
assert len(row) == 5
peak = PeakTuple(*(elem.strip() for elem in row))
parsed.append(peak)
return parsed
def insert_to_database(peaks, database):
app = create_app(SQLALCHEMY_DATABASE_URI=database)
with app.app_context():
for peak in peaks:
if not peak.coordinates:
sys.stderr.write('Skipping %s due to missing coordinates\n' % peak.name)
continue
hike_destination = HikeDestination(
name=peak.name,
high_point_coord=point_from_coordinates(peak.coordinates),
altitude=0,
is_summit=True,
)
db.session.add(hike_destination)
if not peak.method:
sys.stderr.write('Skipping hike for %s since method is missing\n' % peak.name)
continue
hike = Hike(
destination=hike_destination,
method=METHOD_MAP.get(peak.method, peak.method),
date=datetime.datetime.strptime(peak.date, DATE_FORMAT) if peak.date else None,
)
db.session.add(hike)
db.session.commit()
def point_from_coordinates(coord):
'''Transform a "float, float" string to POINT(float float)'''
match = COORDINATE_FORMAT.match(coord)
assert match, '%s didnt match coordinate format' % coord
parsed_coords = (match.group(1), match.group(2))
return 'POINT(%s %s)' % parsed_coords
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('file', help='File to read the LCP from')
parser.add_argument('-d', '--database', help='Address to the database to insert into',
default='postgres://vagrant:vagrant@10.20.30.50/vagrant')
return parser.parse_args()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to load peaks from csv<commit_after>#!./venv/bin/python
import argparse
import csv
import datetime
import sys
import re
from collections import namedtuple
from blag import create_app, db
from blag.models import HikeDestination, Hike
DATE_FORMAT = '%d.%m.%Y'
COORDINATE_FORMAT = re.compile(r'^([0-9.-]+),\s*([0-9.-]+)$')
METHOD_MAP = {
'fots': 'foot',
'stegjern': 'crampons',
}
PeakTuple = namedtuple('PeakTuple', 'name method coordinates date notes')
def main():
args = get_args()
peaks = parse_csv(args.file)
insert_to_database(peaks, args.database)
def parse_csv(input_file):
'''Assumes fields are name,method,coordinate,date,notes'''
parsed = []
with open(input_file) as fh:
csv_reader = csv.reader(fh)
for row_num, row in enumerate(csv_reader):
if row_num == 0:
# Skip the header line
continue
assert len(row) == 5
peak = PeakTuple(*(elem.strip() for elem in row))
parsed.append(peak)
return parsed
def insert_to_database(peaks, database):
app = create_app(SQLALCHEMY_DATABASE_URI=database)
with app.app_context():
for peak in peaks:
if not peak.coordinates:
sys.stderr.write('Skipping %s due to missing coordinates\n' % peak.name)
continue
hike_destination = HikeDestination(
name=peak.name,
high_point_coord=point_from_coordinates(peak.coordinates),
altitude=0,
is_summit=True,
)
db.session.add(hike_destination)
if not peak.method:
sys.stderr.write('Skipping hike for %s since method is missing\n' % peak.name)
continue
hike = Hike(
destination=hike_destination,
method=METHOD_MAP.get(peak.method, peak.method),
date=datetime.datetime.strptime(peak.date, DATE_FORMAT) if peak.date else None,
)
db.session.add(hike)
db.session.commit()
def point_from_coordinates(coord):
'''Transform a "float, float" string to POINT(float float)'''
match = COORDINATE_FORMAT.match(coord)
assert match, '%s didnt match coordinate format' % coord
parsed_coords = (match.group(1), match.group(2))
return 'POINT(%s %s)' % parsed_coords
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('file', help='File to read the LCP from')
parser.add_argument('-d', '--database', help='Address to the database to insert into',
default='postgres://vagrant:vagrant@10.20.30.50/vagrant')
return parser.parse_args()
if __name__ == '__main__':
main()
|
|
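For reference, COORDINATE_FORMAT above accepts '<float>, <float>' strings and point_from_coordinates rewrites them as WKT points; anything else trips the assertion:

    >>> point_from_coordinates('61.636, 8.312')
    'POINT(61.636 8.312)'
    >>> point_from_coordinates('not a coordinate')
    Traceback (most recent call last):
        ...
    AssertionError: not a coordinate didnt match coordinate format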
5235f0eea27360e9891bc905329f8e5fd2e51a4c
|
src/Scripts/wc-history.py
|
src/Scripts/wc-history.py
|
import subprocess
# Get history of linecount by date.
repo_dir = '/Users/visualstudio/dev/bf-count-lines'
def get_num_lines():
# find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | xargs wc'.split()
find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | grep -v Data | xargs wc'
p_find = subprocess.Popen(find_cmd, cwd=repo_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
find_out = p_find.stdout.read()
num_lines = find_out.split()[-4].decode("utf-8")
return num_lines
def checkout(sha):
checkout_cmd = ['git', 'checkout', sha]
p_checkout = subprocess.Popen(checkout_cmd, cwd=repo_dir,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p_checkout.wait()
# Output from `git log --date=local --format="%h,%ad"`
git_log = [line.strip() for line in open('log-date')]
git_log.reverse()
print('{},{}'.format('date','wc'))
for line in git_log:
sha, date = line.split(',')
checkout(sha)
wc = get_num_lines()
print('{},{}'.format(date, wc))
|
Add quick script to get history of wc.
|
Add quick script to get history of wc.
|
Python
|
mit
|
BitFunnel/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,danluu/BitFunnel
|
Add quick script to get history of wc.
|
import subprocess
# Get history of linecount by date.
repo_dir = '/Users/visualstudio/dev/bf-count-lines'
def get_num_lines():
# find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | xargs wc'.split()
find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | grep -v Data | xargs wc'
p_find = subprocess.Popen(find_cmd, cwd=repo_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
find_out = p_find.stdout.read()
num_lines = find_out.split()[-4].decode("utf-8")
return num_lines
def checkout(sha):
checkout_cmd = ['git', 'checkout', sha]
p_checkout = subprocess.Popen(checkout_cmd, cwd=repo_dir,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p_checkout.wait()
# Output from `git log --date=local --format="%h,%ad"`
git_log = [line.strip() for line in open('log-date')]
git_log.reverse()
print('{},{}'.format('date','wc'))
for line in git_log:
sha, date = line.split(',')
checkout(sha)
wc = get_num_lines()
print('{},{}'.format(date, wc))
|
<commit_before><commit_msg>Add quick script to get history of wc.<commit_after>
|
import subprocess
# Get history of linecount by date.
repo_dir = '/Users/visualstudio/dev/bf-count-lines'
def get_num_lines():
# find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | xargs wc'.split()
find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | grep -v Data | xargs wc'
p_find = subprocess.Popen(find_cmd, cwd=repo_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
find_out = p_find.stdout.read()
num_lines = find_out.split()[-4].decode("utf-8")
return num_lines
def checkout(sha):
checkout_cmd = ['git', 'checkout', sha]
p_checkout = subprocess.Popen(checkout_cmd, cwd=repo_dir,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p_checkout.wait()
# Output from `git log --date=local --format="%h,%ad"`
git_log = [line.strip() for line in open('log-date')]
git_log.reverse()
print('{},{}'.format('date','wc'))
for line in git_log:
sha, date = line.split(',')
checkout(sha)
wc = get_num_lines()
print('{},{}'.format(date, wc))
|
Add quick script to get history of wc.import subprocess
# Get history of linecount by date.
repo_dir = '/Users/visualstudio/dev/bf-count-lines'
def get_num_lines():
# find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | xargs wc'.split()
find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | grep -v Data | xargs wc'
p_find = subprocess.Popen(find_cmd, cwd=repo_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
find_out = p_find.stdout.read()
num_lines = find_out.split()[-4].decode("utf-8")
return num_lines
def checkout(sha):
checkout_cmd = ['git', 'checkout', sha]
p_checkout = subprocess.Popen(checkout_cmd, cwd=repo_dir,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p_checkout.wait()
# Output from `git log --date=local --format="%h,%ad"`
git_log = [line.strip() for line in open('log-date')]
git_log.reverse()
print('{},{}'.format('date','wc'))
for line in git_log:
sha, date = line.split(',')
checkout(sha)
wc = get_num_lines()
print('{},{}'.format(date, wc))
|
<commit_before><commit_msg>Add quick script to get history of wc.<commit_after>import subprocess
# Get history of linecount by date.
repo_dir = '/Users/visualstudio/dev/bf-count-lines'
def get_num_lines():
# find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | xargs wc'.split()
find_cmd = 'find inc src test tools -name "*.cpp" -o -name "*.h" | grep -v Data | xargs wc'
p_find = subprocess.Popen(find_cmd, cwd=repo_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
find_out = p_find.stdout.read()
num_lines = find_out.split()[-4].decode("utf-8")
return num_lines
def checkout(sha):
checkout_cmd = ['git', 'checkout', sha]
p_checkout = subprocess.Popen(checkout_cmd, cwd=repo_dir,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p_checkout.wait()
# Output from `git log --date=local --format="%h,%ad"`
git_log = [line.strip() for line in open('log-date')]
git_log.reverse()
print('{},{}'.format('date','wc'))
for line in git_log:
sha, date = line.split(',')
checkout(sha)
wc = get_num_lines()
print('{},{}'.format(date, wc))
|
|
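The `[-4]` index in get_num_lines() above works because wc appends a summary row of the form `<lines> <words> <bytes> total`, so the fourth token from the end of the whole output is the total line count. The same extraction with check_output, as a sketch (assumes a POSIX shell and at least two matched files so the total row exists):

    import subprocess

    out = subprocess.check_output(
        'find inc src test tools -name "*.cpp" -o -name "*.h" | grep -v Data | xargs wc',
        shell=True, cwd=repo_dir)
    num_lines = out.split()[-4].decode('utf-8')  # lines column of wc's "total" row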
04dac71a79e1526ae25350413015c24ee67d608b
|
fabric/colors.py
|
fabric/colors.py
|
"""
Functions for wrapping strings in ANSI color codes.
"""
def _wrap_with(code):
def inner(s, bold=False):
c = code
if bold:
c = "1;%s" % c
return "\033[%sm%s\033[0m" % (c, s)
return inner
red = _wrap_with('31')
green = _wrap_with('32')
yellow = _wrap_with('33')
blue = _wrap_with('34')
magenta = _wrap_with('35')
cyan = _wrap_with('36')
white = _wrap_with('37')
|
Add tiny experimental color library.
|
Add tiny experimental color library.
Re #101.
|
Python
|
bsd-2-clause
|
pgroudas/fabric,rbramwell/fabric,getsentry/fabric,kmonsoor/fabric,ericholscher/fabric,SamuelMarks/fabric,bitmonk/fabric,haridsv/fabric,cmattoon/fabric,jaraco/fabric,rane-hs/fabric-py3,tolbkni/fabric,qinrong/fabric,MjAbuz/fabric,xLegoz/fabric,raimon49/fabric,akaariai/fabric,rodrigc/fabric,mathiasertl/fabric,pashinin/fabric,likesxuqiang/fabric,amaniak/fabric,fernandezcuesta/fabric,bspink/fabric,elijah513/fabric,tekapo/fabric,StackStorm/fabric,itoed/fabric,ploxiln/fabric,kxxoling/fabric,hrubi/fabric,sdelements/fabric,TarasRudnyk/fabric,cgvarela/fabric,askulkarni2/fabric,felix-d/fabric,opavader/fabric,bitprophet/fabric
|
Add tiny experimental color library.
Re #101.
|
"""
Functions for wrapping strings in ANSI color codes.
"""
def _wrap_with(code):
def inner(s, bold=False):
c = code
if bold:
c = "1;%s" % c
return "\033[%sm%s\033[0m" % (c, s)
return inner
red = _wrap_with('31')
green = _wrap_with('32')
yellow = _wrap_with('33')
blue = _wrap_with('34')
magenta = _wrap_with('35')
cyan = _wrap_with('36')
white = _wrap_with('37')
|
<commit_before><commit_msg>Add tiny experimental color library.
Re #101.<commit_after>
|
"""
Functions for wrapping strings in ANSI color codes.
"""
def _wrap_with(code):
def inner(s, bold=False):
c = code
if bold:
c = "1;%s" % c
return "\033[%sm%s\033[0m" % (c, s)
return inner
red = _wrap_with('31')
green = _wrap_with('32')
yellow = _wrap_with('33')
blue = _wrap_with('34')
magenta = _wrap_with('35')
cyan = _wrap_with('36')
white = _wrap_with('37')
|
Add tiny experimental color library.
Re #101."""
Functions for wrapping strings in ANSI color codes.
"""
def _wrap_with(code):
def inner(s, bold=False):
c = code
if bold:
c = "1;%s" % c
return "\033[%sm%s\033[0m" % (c, s)
return inner
red = _wrap_with('31')
green = _wrap_with('32')
yellow = _wrap_with('33')
blue = _wrap_with('34')
magenta = _wrap_with('35')
cyan = _wrap_with('36')
white = _wrap_with('37')
|
<commit_before><commit_msg>Add tiny experimental color library.
Re #101.<commit_after>"""
Functions for wrapping strings in ANSI color codes.
"""
def _wrap_with(code):
def inner(s, bold=False):
c = code
if bold:
c = "1;%s" % c
return "\033[%sm%s\033[0m" % (c, s)
return inner
red = _wrap_with('31')
green = _wrap_with('32')
yellow = _wrap_with('33')
blue = _wrap_with('34')
magenta = _wrap_with('35')
cyan = _wrap_with('36')
white = _wrap_with('37')
|
|
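Using the color helpers above is plain function application; 31–37 are the standard ANSI foreground codes and the `1;` prefix requests bold:

    print(red('failure', bold=True))   # emits \033[1;31mfailure\033[0m
    print(green('success'))            # emits \033[32msuccess\033[0m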
ca458444856dde7af337c3abb10e8913faaed88a
|
src/ggrc_workflows/migrations/versions/20150423173706_2b89912f95f1_add_notification_entries_for_existng_.py
|
src/ggrc_workflows/migrations/versions/20150423173706_2b89912f95f1_add_notification_entries_for_existng_.py
|
"""add notification entries for existng workflows
Revision ID: 2b89912f95f1
Revises: 27b09c761b4e
Create Date: 2015-04-23 17:37:06.366115
"""
from datetime import date
from sqlalchemy import and_
from ggrc import db
from ggrc.models import Notification
from ggrc_workflows.models import CycleTaskGroupObjectTask, Workflow
from ggrc_workflows.notification.notification_handler import (
add_cycle_task_due_notifications,
handle_workflow_modify,
get_notification_type,
)
# revision identifiers, used by Alembic.
revision = '2b89912f95f1'
down_revision = '27b09c761b4e'
def upgrade():
existing_tasks = CycleTaskGroupObjectTask.query.filter(and_(
CycleTaskGroupObjectTask.end_date >= date.today(),
CycleTaskGroupObjectTask.status != "Verified"
)).all()
for cycle_task in existing_tasks:
add_cycle_task_due_notifications(cycle_task)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
handle_workflow_modify(None, wf)
db.session.commit()
def downgrade():
delete_types_list = [
"cycle_task_due_in",
"one_time_cycle_task_due_in",
"weekly_cycle_task_due_in",
"monthly_cycle_task_due_in",
"quarterly_cycle_task_due_in",
"annually_cycle_task_due_in",
"cycle_task_due_today",
"weekly_workflow_starts_in",
"monthly_workflow_starts_in",
"quarterly_workflow_starts_in",
"annually_workflow_starts_in",
]
for delete_type in delete_types_list:
notif_type = get_notification_type(delete_type)
Notification.query.filter(
Notification.notification_type == notif_type).delete()
db.session.commit()
|
Add notifications for all existing objects
|
Add notifications for all existing objects
We assume that the past notifications have not been sent, so we need to
add notifications for all future cycle starts and task due dates.
|
Python
|
apache-2.0
|
josthkko/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,vladan-m/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,uskudnik/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,uskudnik/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,uskudnik/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core
|
Add notifications for all existing objects
We assume that the past notifications have not been sent, so we need to
add notifications for all future cycle starts and task due dates.
|
"""add notification entries for existng workflows
Revision ID: 2b89912f95f1
Revises: 27b09c761b4e
Create Date: 2015-04-23 17:37:06.366115
"""
from datetime import date
from sqlalchemy import and_
from ggrc import db
from ggrc.models import Notification
from ggrc_workflows.models import CycleTaskGroupObjectTask, Workflow
from ggrc_workflows.notification.notification_handler import (
add_cycle_task_due_notifications,
handle_workflow_modify,
get_notification_type,
)
# revision identifiers, used by Alembic.
revision = '2b89912f95f1'
down_revision = '27b09c761b4e'
def upgrade():
existing_tasks = CycleTaskGroupObjectTask.query.filter(and_(
CycleTaskGroupObjectTask.end_date >= date.today(),
CycleTaskGroupObjectTask.status != "Verified"
)).all()
for cycle_task in existing_tasks:
add_cycle_task_due_notifications(cycle_task)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
handle_workflow_modify(None, wf)
db.session.commit()
def downgrade():
delete_types_list = [
"cycle_task_due_in",
"one_time_cycle_task_due_in",
"weekly_cycle_task_due_in",
"monthly_cycle_task_due_in",
"quarterly_cycle_task_due_in",
"annually_cycle_task_due_in",
"cycle_task_due_today",
"weekly_workflow_starts_in",
"monthly_workflow_starts_in",
"quarterly_workflow_starts_in",
"annually_workflow_starts_in",
]
for delete_type in delete_types_list:
notif_type = get_notification_type(delete_type)
Notification.query.filter(
Notification.notification_type == notif_type).delete()
db.session.commit()
|
<commit_before><commit_msg>Add notifications for all existing objects
We assume that the past notifications have not been sent, so we need to
add notifications for all future cycle starts and task due dates.<commit_after>
|
"""add notification entries for existng workflows
Revision ID: 2b89912f95f1
Revises: 27b09c761b4e
Create Date: 2015-04-23 17:37:06.366115
"""
from datetime import date
from sqlalchemy import and_
from ggrc import db
from ggrc.models import Notification
from ggrc_workflows.models import CycleTaskGroupObjectTask, Workflow
from ggrc_workflows.notification.notification_handler import (
add_cycle_task_due_notifications,
handle_workflow_modify,
get_notification_type,
)
# revision identifiers, used by Alembic.
revision = '2b89912f95f1'
down_revision = '27b09c761b4e'
def upgrade():
existing_tasks = CycleTaskGroupObjectTask.query.filter(and_(
CycleTaskGroupObjectTask.end_date >= date.today(),
CycleTaskGroupObjectTask.status != "Verified"
)).all()
for cycle_task in existing_tasks:
add_cycle_task_due_notifications(cycle_task)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
handle_workflow_modify(None, wf)
db.session.commit()
def downgrade():
delete_types_list = [
"cycle_task_due_in",
"one_time_cycle_task_due_in",
"weekly_cycle_task_due_in",
"monthly_cycle_task_due_in",
"quarterly_cycle_task_due_in",
"annually_cycle_task_due_in",
"cycle_task_due_today",
"weekly_workflow_starts_in",
"monthly_workflow_starts_in",
"quarterly_workflow_starts_in",
"annually_workflow_starts_in",
]
for delete_type in delete_types_list:
notif_type = get_notification_type(delete_type)
Notification.query.filter(
Notification.notification_type == notif_type).delete()
db.session.commit()
|
Add notifications for all existing objects
We assume that the past notifications have not been sent, so we need to
add notifications for all future cycle starts and task due dates.
"""add notification entries for existng workflows
Revision ID: 2b89912f95f1
Revises: 27b09c761b4e
Create Date: 2015-04-23 17:37:06.366115
"""
from datetime import date
from sqlalchemy import and_
from ggrc import db
from ggrc.models import Notification
from ggrc_workflows.models import CycleTaskGroupObjectTask, Workflow
from ggrc_workflows.notification.notification_handler import (
add_cycle_task_due_notifications,
handle_workflow_modify,
get_notification_type,
)
# revision identifiers, used by Alembic.
revision = '2b89912f95f1'
down_revision = '27b09c761b4e'
def upgrade():
existing_tasks = CycleTaskGroupObjectTask.query.filter(and_(
CycleTaskGroupObjectTask.end_date >= date.today(),
CycleTaskGroupObjectTask.status != "Verified"
)).all()
for cycle_task in existing_tasks:
add_cycle_task_due_notifications(cycle_task)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
handle_workflow_modify(None, wf)
db.session.commit()
def downgrade():
delete_types_list = [
"cycle_task_due_in",
"one_time_cycle_task_due_in",
"weekly_cycle_task_due_in",
"monthly_cycle_task_due_in",
"quarterly_cycle_task_due_in",
"annually_cycle_task_due_in",
"cycle_task_due_today",
"weekly_workflow_starts_in",
"monthly_workflow_starts_in",
"quarterly_workflow_starts_in",
"annually_workflow_starts_in",
]
for delete_type in delete_types_list:
notif_type = get_notification_type(delete_type)
Notification.query.filter(
Notification.notification_type == notif_type).delete()
db.session.commit()
|
<commit_before><commit_msg>Add notifications for all existing objects
We assume that the past notifications have not been sent, so we need to
add notifications for all future cycle starts and task due dates.<commit_after>
"""add notification entries for existng workflows
Revision ID: 2b89912f95f1
Revises: 27b09c761b4e
Create Date: 2015-04-23 17:37:06.366115
"""
from datetime import date
from sqlalchemy import and_
from ggrc import db
from ggrc.models import Notification
from ggrc_workflows.models import CycleTaskGroupObjectTask, Workflow
from ggrc_workflows.notification.notification_handler import (
add_cycle_task_due_notifications,
handle_workflow_modify,
get_notification_type,
)
# revision identifiers, used by Alembic.
revision = '2b89912f95f1'
down_revision = '27b09c761b4e'
def upgrade():
existing_tasks = CycleTaskGroupObjectTask.query.filter(and_(
CycleTaskGroupObjectTask.end_date >= date.today(),
CycleTaskGroupObjectTask.status != "Verified"
)).all()
for cycle_task in existing_tasks:
add_cycle_task_due_notifications(cycle_task)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
handle_workflow_modify(None, wf)
db.session.commit()
def downgrade():
delete_types_list = [
"cycle_task_due_in",
"one_time_cycle_task_due_in",
"weekly_cycle_task_due_in",
"monthly_cycle_task_due_in",
"quarterly_cycle_task_due_in",
"annually_cycle_task_due_in",
"cycle_task_due_today",
"weekly_workflow_starts_in",
"monthly_workflow_starts_in",
"quarterly_workflow_starts_in",
"annually_workflow_starts_in",
]
for delete_type in delete_types_list:
notif_type = get_notification_type(delete_type)
Notification.query.filter(
Notification.notification_type == notif_type).delete()
db.session.commit()
|
|
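A small note on the upgrade in the migration above: the explicit and_() is optional, since passing several criteria to filter() ANDs them implicitly in SQLAlchemy — an equivalent sketch:

    existing_tasks = CycleTaskGroupObjectTask.query.filter(
        CycleTaskGroupObjectTask.end_date >= date.today(),
        CycleTaskGroupObjectTask.status != "Verified",
    ).all()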
98548ce5da63dc10d31e3d8f93d4ea99ccae1f48
|
tests/machines/merge_overlapping_intervals/vim_merge_overlapping_intervals_test.py
|
tests/machines/merge_overlapping_intervals/vim_merge_overlapping_intervals_test.py
|
import subprocess
from vim_turing_machine.machines.merge_overlapping_intervals.decode_intervals import decode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.encode_intervals import encode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.merge_overlapping_intervals import MergeOverlappingIntervalsGenerator
from vim_turing_machine.vim_constants import VIM_MACHINE_FILENAME
from vim_turing_machine.vim_machine import VimTuringMachine
NUM_BITS = 3
def run_vim_machine(intervals):
initial_tape = encode_intervals(intervals, NUM_BITS)
gen = MergeOverlappingIntervalsGenerator(NUM_BITS)
merge_overlapping_intervals = VimTuringMachine(gen.merge_overlapping_intervals_transitions(), debug=False)
# Write to the vim machine file
merge_overlapping_intervals.run(initial_tape=initial_tape)
subprocess.run(
[
'vim',
'-u',
'vimrc',
VIM_MACHINE_FILENAME,
'-c',
# Execute the vim machine and then save the resulting file
":execute 'normal gg0yy@\"' | :x",
],
timeout=10,
check=True,
)
def read_contents_of_tape():
with open(VIM_MACHINE_FILENAME, 'r') as f:
tape_lines = []
found_beginning_of_tape = False
for line in f:
# Look for the lines between '_t:' and 'notvalid'
if line.startswith('_t:'):
found_beginning_of_tape = True
elif line.startswith('notvalid'):
return convert_tape_to_string(tape_lines)
elif found_beginning_of_tape:
tape_lines.append(line)
raise AssertionError('Could not find the tape')
def convert_tape_to_string(tape_lines):
return ''.join(tape_lines).replace(' ', '').replace('\n', '')
def test_merge_intervals_in_vim():
run_vim_machine([[1, 2], [2, 3], [5, 7]])
tape = read_contents_of_tape()
intervals = decode_intervals(tape, num_bits=NUM_BITS)
assert intervals == [[1, 3], [5, 7]]
|
Add an integration test for the vim machine.
|
Add an integration test for the vim machine.
|
Python
|
mit
|
ealter/vim_turing_machine,ealter/vim_turing_machine
|
Add an integration test for the vim machine.
|
import subprocess
from vim_turing_machine.machines.merge_overlapping_intervals.decode_intervals import decode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.encode_intervals import encode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.merge_overlapping_intervals import MergeOverlappingIntervalsGenerator
from vim_turing_machine.vim_constants import VIM_MACHINE_FILENAME
from vim_turing_machine.vim_machine import VimTuringMachine
NUM_BITS = 3
def run_vim_machine(intervals):
initial_tape = encode_intervals(intervals, NUM_BITS)
gen = MergeOverlappingIntervalsGenerator(NUM_BITS)
merge_overlapping_intervals = VimTuringMachine(gen.merge_overlapping_intervals_transitions(), debug=False)
# Write to the vim machine file
merge_overlapping_intervals.run(initial_tape=initial_tape)
subprocess.run(
[
'vim',
'-u',
'vimrc',
VIM_MACHINE_FILENAME,
'-c',
# Execute the vim machine and then save the resulting file
":execute 'normal gg0yy@\"' | :x",
],
timeout=10,
check=True,
)
def read_contents_of_tape():
with open(VIM_MACHINE_FILENAME, 'r') as f:
tape_lines = []
found_beginning_of_tape = False
for line in f:
# Look for the lines between '_t:' and 'notvalid'
if line.startswith('_t:'):
found_beginning_of_tape = True
elif line.startswith('notvalid'):
return convert_tape_to_string(tape_lines)
elif found_beginning_of_tape:
tape_lines.append(line)
raise AssertionError('Could not find the tape')
def convert_tape_to_string(tape_lines):
return ''.join(tape_lines).replace(' ', '').replace('\n', '')
def test_merge_intervals_in_vim():
run_vim_machine([[1, 2], [2, 3], [5, 7]])
tape = read_contents_of_tape()
intervals = decode_intervals(tape, num_bits=NUM_BITS)
assert intervals == [[1, 3], [5, 7]]
|
<commit_before><commit_msg>Add an integration test for the vim machine.<commit_after>
|
import subprocess
from vim_turing_machine.machines.merge_overlapping_intervals.decode_intervals import decode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.encode_intervals import encode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.merge_overlapping_intervals import MergeOverlappingIntervalsGenerator
from vim_turing_machine.vim_constants import VIM_MACHINE_FILENAME
from vim_turing_machine.vim_machine import VimTuringMachine
NUM_BITS = 3
def run_vim_machine(intervals):
initial_tape = encode_intervals(intervals, NUM_BITS)
gen = MergeOverlappingIntervalsGenerator(NUM_BITS)
merge_overlapping_intervals = VimTuringMachine(gen.merge_overlapping_intervals_transitions(), debug=False)
# Write to the vim machine file
merge_overlapping_intervals.run(initial_tape=initial_tape)
subprocess.run(
[
'vim',
'-u',
'vimrc',
VIM_MACHINE_FILENAME,
'-c',
# Execute the vim machine and then save the resulting file
":execute 'normal gg0yy@\"' | :x",
],
timeout=10,
check=True,
)
def read_contents_of_tape():
with open(VIM_MACHINE_FILENAME, 'r') as f:
tape_lines = []
found_beginning_of_tape = False
for line in f:
# Look for the lines between '_t:' and 'notvalid'
if line.startswith('_t:'):
found_beginning_of_tape = True
elif line.startswith('notvalid'):
return convert_tape_to_string(tape_lines)
elif found_beginning_of_tape:
tape_lines.append(line)
raise AssertionError('Could not find the tape')
def convert_tape_to_string(tape_lines):
return ''.join(tape_lines).replace(' ', '').replace('\n', '')
def test_merge_intervals_in_vim():
run_vim_machine([[1, 2], [2, 3], [5, 7]])
tape = read_contents_of_tape()
intervals = decode_intervals(tape, num_bits=NUM_BITS)
assert intervals == [[1, 3], [5, 7]]
|
Add an integration test for the vim machine.import subprocess
from vim_turing_machine.machines.merge_overlapping_intervals.decode_intervals import decode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.encode_intervals import encode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.merge_overlapping_intervals import MergeOverlappingIntervalsGenerator
from vim_turing_machine.vim_constants import VIM_MACHINE_FILENAME
from vim_turing_machine.vim_machine import VimTuringMachine
NUM_BITS = 3
def run_vim_machine(intervals):
initial_tape = encode_intervals(intervals, NUM_BITS)
gen = MergeOverlappingIntervalsGenerator(NUM_BITS)
merge_overlapping_intervals = VimTuringMachine(gen.merge_overlapping_intervals_transitions(), debug=False)
# Write to the vim machine file
merge_overlapping_intervals.run(initial_tape=initial_tape)
subprocess.run(
[
'vim',
'-u',
'vimrc',
VIM_MACHINE_FILENAME,
'-c',
# Execute the vim machine and then save the resulting file
":execute 'normal gg0yy@\"' | :x",
],
timeout=10,
check=True,
)
def read_contents_of_tape():
with open(VIM_MACHINE_FILENAME, 'r') as f:
tape_lines = []
found_beginning_of_tape = False
for line in f:
# Look for the lines between '_t:' and 'notvalid'
if line.startswith('_t:'):
found_beginning_of_tape = True
elif line.startswith('notvalid'):
return convert_tape_to_string(tape_lines)
elif found_beginning_of_tape:
tape_lines.append(line)
raise AssertionError('Could not find the tape')
def convert_tape_to_string(tape_lines):
return ''.join(tape_lines).replace(' ', '').replace('\n', '')
def test_merge_intervals_in_vim():
run_vim_machine([[1, 2], [2, 3], [5, 7]])
tape = read_contents_of_tape()
intervals = decode_intervals(tape, num_bits=NUM_BITS)
assert intervals == [[1, 3], [5, 7]]
|
<commit_before><commit_msg>Add an integration test for the vim machine.<commit_after>import subprocess
from vim_turing_machine.machines.merge_overlapping_intervals.decode_intervals import decode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.encode_intervals import encode_intervals
from vim_turing_machine.machines.merge_overlapping_intervals.merge_overlapping_intervals import MergeOverlappingIntervalsGenerator
from vim_turing_machine.vim_constants import VIM_MACHINE_FILENAME
from vim_turing_machine.vim_machine import VimTuringMachine
NUM_BITS = 3
def run_vim_machine(intervals):
initial_tape = encode_intervals(intervals, NUM_BITS)
gen = MergeOverlappingIntervalsGenerator(NUM_BITS)
merge_overlapping_intervals = VimTuringMachine(gen.merge_overlapping_intervals_transitions(), debug=False)
# Write to the vim machine file
merge_overlapping_intervals.run(initial_tape=initial_tape)
subprocess.run(
[
'vim',
'-u',
'vimrc',
VIM_MACHINE_FILENAME,
'-c',
# Execute the vim machine and then save the resulting file
":execute 'normal gg0yy@\"' | :x",
],
timeout=10,
check=True,
)
def read_contents_of_tape():
with open(VIM_MACHINE_FILENAME, 'r') as f:
tape_lines = []
found_beginning_of_tape = False
for line in f:
# Look for the lines between '_t:' and 'notvalid'
if line.startswith('_t:'):
found_beginning_of_tape = True
elif line.startswith('notvalid'):
return convert_tape_to_string(tape_lines)
elif found_beginning_of_tape:
tape_lines.append(line)
raise AssertionError('Could not find the tape')
def convert_tape_to_string(tape_lines):
return ''.join(tape_lines).replace(' ', '').replace('\n', '')
def test_merge_intervals_in_vim():
run_vim_machine([[1, 2], [2, 3], [5, 7]])
tape = read_contents_of_tape()
intervals = decode_intervals(tape, num_bits=NUM_BITS)
assert intervals == [[1, 3], [5, 7]]
|
|
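In the test above, the vim invocation yanks the first line of the machine file (gg0yy) and replays it as a macro (@") to run the Turing machine, then :x saves the result. The tape parser then simply strips spaces and newlines from the captured region, e.g.:

    >>> convert_tape_to_string(['0 1 0\n', '1 1 0\n'])
    '010110'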
9f552d9df9ea850a178ced2b16d57e5e8849a8fe
|
tools/perf/page_sets/page_set_unittest.py
|
tools/perf/page_sets/page_set_unittest.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import decorators
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
# TODO(tbarzic): crbug.com/386416.
@decorators.Disabled('chromeos')
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
|
Disable failing testSmoke on cros.
|
Disable failing testSmoke on cros.
BUG=386416
TEST=bots
Review URL: https://codereview.chromium.org/336953004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@280661 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
ondra-novak/chromium.src,dednal/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,markYoungH/chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,ltilve/chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,jaruba/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,dednal/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,dushu1203/chromium.src,dednal/chromium.src,littlstar/chromium.src,Chilledheart/chromium,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,Jonekee/chromium.src,ltilve/chromium,dednal/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ltilve/chromium,ondra-novak/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,Just-D/chromium-1,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,ltilve/chromium,dednal/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,markYoungH/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,Just-D/chromium-1,ltilve/chromium,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
Disable failing testSmoke on cros.
BUG=386416
TEST=bots
Review URL: https://codereview.chromium.org/336953004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@280661 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import decorators
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
# TODO(tbarzic): crbug.com/386416.
@decorators.Disabled('chromeos')
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
<commit_msg>Disable failing testSmoke on cros.
BUG=386416
TEST=bots
Review URL: https://codereview.chromium.org/336953004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@280661 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import decorators
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
# TODO(tbarzic): crbug.com/386416.
@decorators.Disabled('chromeos')
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
Disable failing testSmoke on cros.
BUG=386416
TEST=bots
Review URL: https://codereview.chromium.org/336953004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@280661 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import decorators
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
# TODO(tbarzic): crbug.com/386416.
@decorators.Disabled('chromeos')
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
<commit_msg>Disable failing testSmoke on cros.
BUG=386416
TEST=bots
Review URL: https://codereview.chromium.org/336953004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@280661 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import decorators
from telemetry.unittest import page_set_smoke_test
class PageSetUnitTest(page_set_smoke_test.PageSetSmokeTest):
# TODO(tbarzic): crbug.com/386416.
@decorators.Disabled('chromeos')
def testSmoke(self):
page_sets_dir = os.path.dirname(os.path.realpath(__file__))
top_level_dir = os.path.dirname(page_sets_dir)
self.RunSmokeTest(page_sets_dir, top_level_dir)
|
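The fix above gates a flaky test per platform with telemetry's decorators.Disabled. As a rough illustration of how such a platform-gated skip can be built from the standard library, here is a simplified, assumed stand-in (the name disabled_on is hypothetical, and real telemetry consults its own platform backend rather than platform.system(), which is also why ChromeOS needs special handling):

import platform
import unittest

def disabled_on(*platforms):
    # Hypothetical stand-in for telemetry's decorators.Disabled: skip the
    # decorated test whenever the current platform matches a listed name.
    current = platform.system().lower()  # e.g. 'linux', 'darwin', 'windows'
    return unittest.skipIf(
        current in platforms, 'disabled on: %s' % ', '.join(platforms))

class ExampleTest(unittest.TestCase):
    @disabled_on('darwin')
    def test_example(self):
        self.assertTrue(True)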
83cadc5fa67536316b9f00a6a16eadbe6e3f927b
|
common/test/acceptance/accessibility/test_lms_dashboard_axs.py
|
common/test/acceptance/accessibility/test_lms_dashboard_axs.py
|
"""
Accessibility tests for LMS dashboard page.
Run just this test with:
SELENIUM_BROWSER=phantomjs paver test_bokchoy -d accessibility -t test_lms_dashboard_axs.py
"""
from ..tests.lms.test_lms_dashboard import LmsDashboardPageTest
class LmsDashboardAxsTest(LmsDashboardPageTest):
"""
Class to test lms student dashboard accessibility.
"""
def test_dashboard_course_listings_axs(self):
"""
Test the accessibility of the course listings
"""
course_listings = self.dashboard_page.get_course_listings()
self.assertEqual(len(course_listings), 1)
report = self.dashboard_page.do_axs_audit()
# There was one page in this session
self.assertEqual(1, len(report))
result = report[0]
# Verify that this page has no accessibility errors.
self.assertEqual(0, len(result.errors))
# Verify that this page currently has 2 accessibility warnings.
self.assertEqual(2, len(result.warnings))
# And that these are the warnings that the page currently gives.
for warning in result.warnings:
self.assertTrue(
warning.startswith(('Warning: AX_FOCUS_01', 'Warning: AX_COLOR_01',)),
msg="Unexpected warning: {}".format(warning))
|
Add a11y test for lms student dashboard
|
Add a11y test for lms student dashboard
|
Python
|
agpl-3.0
|
nagyistoce/edx-platform,jonathan-beard/edx-platform,jbzdak/edx-platform,romain-li/edx-platform,jbassen/edx-platform,chand3040/cloud_that,kamalx/edx-platform,J861449197/edx-platform,louyihua/edx-platform,martynovp/edx-platform,cpennington/edx-platform,nagyistoce/edx-platform,caesar2164/edx-platform,shashank971/edx-platform,appsembler/edx-platform,AkA84/edx-platform,ESOedX/edx-platform,J861449197/edx-platform,ovnicraft/edx-platform,kmoocdev2/edx-platform,shubhdev/edxOnBaadal,solashirai/edx-platform,Ayub-Khan/edx-platform,philanthropy-u/edx-platform,Edraak/circleci-edx-platform,openfun/edx-platform,naresh21/synergetics-edx-platform,Endika/edx-platform,gymnasium/edx-platform,arbrandes/edx-platform,SravanthiSinha/edx-platform,itsjeyd/edx-platform,jjmiranda/edx-platform,eduNEXT/edunext-platform,devs1991/test_edx_docmode,nanolearningllc/edx-platform-cypress-2,ovnicraft/edx-platform,B-MOOC/edx-platform,jamiefolsom/edx-platform,xingyepei/edx-platform,openfun/edx-platform,a-parhom/edx-platform,jbzdak/edx-platform,alexthered/kienhoc-platform,miptliot/edx-platform,inares/edx-platform,MakeHer/edx-platform,synergeticsedx/deployment-wipro,mushtaqak/edx-platform,jazkarta/edx-platform,inares/edx-platform,Stanford-Online/edx-platform,xingyepei/edx-platform,ahmedaljazzar/edx-platform,jazztpt/edx-platform,ahmedaljazzar/edx-platform,MakeHer/edx-platform,ahmadio/edx-platform,xuxiao19910803/edx-platform,a-parhom/edx-platform,leansoft/edx-platform,adoosii/edx-platform,CourseTalk/edx-platform,ahmadiga/min_edx,ubc/edx-platform,cognitiveclass/edx-platform,ak2703/edx-platform,kursitet/edx-platform,romain-li/edx-platform,knehez/edx-platform,ESOedX/edx-platform,IndonesiaX/edx-platform,doganov/edx-platform,eduNEXT/edx-platform,simbs/edx-platform,Edraak/edraak-platform,IndonesiaX/edx-platform,SivilTaram/edx-platform,msegado/edx-platform,prarthitm/edxplatform,adoosii/edx-platform,zhenzhai/edx-platform,hamzehd/edx-platform,AkA84/edx-platform,chauhanhardik/populo_2,xingyepei/edx-platform,leansoft/edx-platform,jbzdak/edx-platform,philanthropy-u/edx-platform,chand3040/cloud_that,chand3040/cloud_that,jbzdak/edx-platform,shubhdev/edxOnBaadal,amir-qayyum-khan/edx-platform,Shrhawk/edx-platform,polimediaupv/edx-platform,zubair-arbi/edx-platform,alexthered/kienhoc-platform,prarthitm/edxplatform,10clouds/edx-platform,utecuy/edx-platform,Edraak/edraak-platform,lduarte1991/edx-platform,jamesblunt/edx-platform,eduNEXT/edunext-platform,ahmadio/edx-platform,jjmiranda/edx-platform,vikas1885/test1,zofuthan/edx-platform,waheedahmed/edx-platform,jonathan-beard/edx-platform,mcgachey/edx-platform,Edraak/circleci-edx-platform,jbassen/edx-platform,arbrandes/edx-platform,Softmotions/edx-platform,ferabra/edx-platform,jzoldak/edx-platform,SivilTaram/edx-platform,IndonesiaX/edx-platform,chudaol/edx-platform,edx-solutions/edx-platform,romain-li/edx-platform,nikolas/edx-platform,jamesblunt/edx-platform,B-MOOC/edx-platform,devs1991/test_edx_docmode,devs1991/test_edx_docmode,Edraak/edx-platform,hastexo/edx-platform,IONISx/edx-platform,vasyarv/edx-platform,devs1991/test_edx_docmode,jbassen/edx-platform,procangroup/edx-platform,stvstnfrd/edx-platform,ZLLab-Mooc/edx-platform,cecep-edu/edx-platform,synergeticsedx/deployment-wipro,fintech-circle/edx-platform,wwj718/edx-platform,shubhdev/edx-platform,ZLLab-Mooc/edx-platform,edx/edx-platform,appsembler/edx-platform,SravanthiSinha/edx-platform,shubhdev/edxOnBaadal,EDUlib/edx-platform,shurihell/testasia,bigdatauniversity/edx-platform,proversity-org/edx-platform,jazztpt/edx-platform,doganov/edx-platform,martynovp/
edx-platform,chauhanhardik/populo,kamalx/edx-platform,synergeticsedx/deployment-wipro,IONISx/edx-platform,devs1991/test_edx_docmode,marcore/edx-platform,ampax/edx-platform,ZLLab-Mooc/edx-platform,bigdatauniversity/edx-platform,nagyistoce/edx-platform,iivic/BoiseStateX,vasyarv/edx-platform,mitocw/edx-platform,lduarte1991/edx-platform,mbareta/edx-platform-ft,edx-solutions/edx-platform,hamzehd/edx-platform,amir-qayyum-khan/edx-platform,alu042/edx-platform,gsehub/edx-platform,nanolearningllc/edx-platform-cypress,Shrhawk/edx-platform,teltek/edx-platform,fly19890211/edx-platform,edx-solutions/edx-platform,edry/edx-platform,BehavioralInsightsTeam/edx-platform,nanolearningllc/edx-platform-cypress-2,gymnasium/edx-platform,Edraak/edx-platform,Softmotions/edx-platform,halvertoluke/edx-platform,nanolearningllc/edx-platform-cypress,rismalrv/edx-platform,bitifirefly/edx-platform,etzhou/edx-platform,arbrandes/edx-platform,IONISx/edx-platform,Shrhawk/edx-platform,devs1991/test_edx_docmode,gymnasium/edx-platform,miptliot/edx-platform,jjmiranda/edx-platform,Lektorium-LLC/edx-platform,pomegranited/edx-platform,mcgachey/edx-platform,doganov/edx-platform,atsolakid/edx-platform,ovnicraft/edx-platform,caesar2164/edx-platform,atsolakid/edx-platform,4eek/edx-platform,analyseuc3m/ANALYSE-v1,cecep-edu/edx-platform,tanmaykm/edx-platform,tiagochiavericosta/edx-platform,synergeticsedx/deployment-wipro,EDUlib/edx-platform,chauhanhardik/populo,CredoReference/edx-platform,Livit/Livit.Learn.EdX,iivic/BoiseStateX,polimediaupv/edx-platform,miptliot/edx-platform,RPI-OPENEDX/edx-platform,itsjeyd/edx-platform,mjirayu/sit_academy,proversity-org/edx-platform,vikas1885/test1,mcgachey/edx-platform,inares/edx-platform,Ayub-Khan/edx-platform,doganov/edx-platform,4eek/edx-platform,caesar2164/edx-platform,kursitet/edx-platform,alu042/edx-platform,don-github/edx-platform,ahmadio/edx-platform,Kalyzee/edx-platform,alu042/edx-platform,TeachAtTUM/edx-platform,louyihua/edx-platform,nikolas/edx-platform,deepsrijit1105/edx-platform,angelapper/edx-platform,ahmadiga/min_edx,Ayub-Khan/edx-platform,halvertoluke/edx-platform,shurihell/testasia,nttks/edx-platform,Edraak/edx-platform,alexthered/kienhoc-platform,cpennington/edx-platform,jzoldak/edx-platform,angelapper/edx-platform,nagyistoce/edx-platform,benpatterson/edx-platform,mbareta/edx-platform-ft,Endika/edx-platform,utecuy/edx-platform,mbareta/edx-platform-ft,leansoft/edx-platform,RPI-OPENEDX/edx-platform,xuxiao19910803/edx-platform,jazztpt/edx-platform,JCBarahona/edX,lduarte1991/edx-platform,nikolas/edx-platform,ahmadiga/min_edx,pabloborrego93/edx-platform,JCBarahona/edX,zadgroup/edx-platform,naresh21/synergetics-edx-platform,nttks/edx-platform,playm2mboy/edx-platform,miptliot/edx-platform,itsjeyd/edx-platform,zerobatu/edx-platform,fintech-circle/edx-platform,fly19890211/edx-platform,pabloborrego93/edx-platform,msegado/edx-platform,ahmedaljazzar/edx-platform,fintech-circle/edx-platform,B-MOOC/edx-platform,raccoongang/edx-platform,doismellburning/edx-platform,playm2mboy/edx-platform,marcore/edx-platform,arbrandes/edx-platform,ubc/edx-platform,atsolakid/edx-platform,xuxiao19910803/edx-platform,tiagochiavericosta/edx-platform,zerobatu/edx-platform,SravanthiSinha/edx-platform,kxliugang/edx-platform,ferabra/edx-platform,motion2015/edx-platform,zhenzhai/edx-platform,motion2015/edx-platform,xuxiao19910803/edx-platform,proversity-org/edx-platform,ferabra/edx-platform,atsolakid/edx-platform,JCBarahona/edX,xinjiguaike/edx-platform,nttks/edx-platform,devs1991/test_edx_docmode,mahendra-r/edx-platform,IONIS
x/edx-platform,zadgroup/edx-platform,TeachAtTUM/edx-platform,solashirai/edx-platform,Kalyzee/edx-platform,jamiefolsom/edx-platform,antoviaque/edx-platform,benpatterson/edx-platform,utecuy/edx-platform,xinjiguaike/edx-platform,wwj718/edx-platform,Semi-global/edx-platform,pomegranited/edx-platform,edx-solutions/edx-platform,kamalx/edx-platform,philanthropy-u/edx-platform,4eek/edx-platform,SravanthiSinha/edx-platform,raccoongang/edx-platform,polimediaupv/edx-platform,vasyarv/edx-platform,ESOedX/edx-platform,analyseuc3m/ANALYSE-v1,ampax/edx-platform,JioEducation/edx-platform,franosincic/edx-platform,jbassen/edx-platform,chauhanhardik/populo,bigdatauniversity/edx-platform,jolyonb/edx-platform,antoviaque/edx-platform,motion2015/edx-platform,jonathan-beard/edx-platform,jamiefolsom/edx-platform,jamesblunt/edx-platform,teltek/edx-platform,kursitet/edx-platform,rismalrv/edx-platform,hastexo/edx-platform,nttks/edx-platform,JCBarahona/edX,utecuy/edx-platform,benpatterson/edx-platform,leansoft/edx-platform,msegado/edx-platform,motion2015/edx-platform,edry/edx-platform,Softmotions/edx-platform,mjirayu/sit_academy,kxliugang/edx-platform,zofuthan/edx-platform,wwj718/edx-platform,shashank971/edx-platform,atsolakid/edx-platform,franosincic/edx-platform,JioEducation/edx-platform,Shrhawk/edx-platform,vikas1885/test1,zubair-arbi/edx-platform,fly19890211/edx-platform,nanolearningllc/edx-platform-cypress,benpatterson/edx-platform,nikolas/edx-platform,nagyistoce/edx-platform,nanolearningllc/edx-platform-cypress-2,alexthered/kienhoc-platform,edry/edx-platform,fintech-circle/edx-platform,JioEducation/edx-platform,marcore/edx-platform,CourseTalk/edx-platform,iivic/BoiseStateX,SivilTaram/edx-platform,jonathan-beard/edx-platform,martynovp/edx-platform,Kalyzee/edx-platform,UOMx/edx-platform,xuxiao19910803/edx,appsembler/edx-platform,chrisndodge/edx-platform,benpatterson/edx-platform,IndonesiaX/edx-platform,hamzehd/edx-platform,itsjeyd/edx-platform,xinjiguaike/edx-platform,tiagochiavericosta/edx-platform,vasyarv/edx-platform,waheedahmed/edx-platform,amir-qayyum-khan/edx-platform,nttks/edx-platform,stvstnfrd/edx-platform,CredoReference/edx-platform,tanmaykm/edx-platform,simbs/edx-platform,gsehub/edx-platform,RPI-OPENEDX/edx-platform,Edraak/edraak-platform,Semi-global/edx-platform,procangroup/edx-platform,cognitiveclass/edx-platform,knehez/edx-platform,ahmedaljazzar/edx-platform,shurihell/testasia,CredoReference/edx-platform,deepsrijit1105/edx-platform,jolyonb/edx-platform,B-MOOC/edx-platform,a-parhom/edx-platform,edx/edx-platform,jazkarta/edx-platform,ubc/edx-platform,pabloborrego93/edx-platform,ESOedX/edx-platform,halvertoluke/edx-platform,ahmadiga/min_edx,gsehub/edx-platform,kxliugang/edx-platform,zofuthan/edx-platform,angelapper/edx-platform,chand3040/cloud_that,pomegranited/edx-platform,ZLLab-Mooc/edx-platform,zadgroup/edx-platform,jbassen/edx-platform,mjirayu/sit_academy,chrisndodge/edx-platform,iivic/BoiseStateX,zofuthan/edx-platform,kamalx/edx-platform,chudaol/edx-platform,ahmadiga/min_edx,ak2703/edx-platform,hastexo/edx-platform,BehavioralInsightsTeam/edx-platform,zofuthan/edx-platform,ahmadio/edx-platform,RPI-OPENEDX/edx-platform,kursitet/edx-platform,eduNEXT/edx-platform,chudaol/edx-platform,Shrhawk/edx-platform,procangroup/edx-platform,mjirayu/sit_academy,wwj718/edx-platform,deepsrijit1105/edx-platform,kamalx/edx-platform,zadgroup/edx-platform,inares/edx-platform,Semi-global/edx-platform,antoviaque/edx-platform,Kalyzee/edx-platform,antoviaque/edx-platform,xuxiao19910803/edx,doismellburning/edx-platform,J8614491
97/edx-platform,Livit/Livit.Learn.EdX,prarthitm/edxplatform,analyseuc3m/ANALYSE-v1,solashirai/edx-platform,appliedx/edx-platform,bitifirefly/edx-platform,kxliugang/edx-platform,bigdatauniversity/edx-platform,prarthitm/edxplatform,MakeHer/edx-platform,defance/edx-platform,simbs/edx-platform,mahendra-r/edx-platform,pepeportela/edx-platform,ZLLab-Mooc/edx-platform,mcgachey/edx-platform,ovnicraft/edx-platform,EDUlib/edx-platform,chudaol/edx-platform,nanolearningllc/edx-platform-cypress-2,fly19890211/edx-platform,pepeportela/edx-platform,cecep-edu/edx-platform,doismellburning/edx-platform,SravanthiSinha/edx-platform,eduNEXT/edx-platform,utecuy/edx-platform,philanthropy-u/edx-platform,hamzehd/edx-platform,ak2703/edx-platform,shabab12/edx-platform,AkA84/edx-platform,J861449197/edx-platform,playm2mboy/edx-platform,kursitet/edx-platform,polimediaupv/edx-platform,mjirayu/sit_academy,pepeportela/edx-platform,4eek/edx-platform,shubhdev/edx-platform,doganov/edx-platform,caesar2164/edx-platform,jazkarta/edx-platform,chauhanhardik/populo,chrisndodge/edx-platform,jbzdak/edx-platform,MakeHer/edx-platform,ak2703/edx-platform,lduarte1991/edx-platform,simbs/edx-platform,tanmaykm/edx-platform,arifsetiawan/edx-platform,shubhdev/edxOnBaadal,Kalyzee/edx-platform,playm2mboy/edx-platform,mushtaqak/edx-platform,jamesblunt/edx-platform,alu042/edx-platform,don-github/edx-platform,arifsetiawan/edx-platform,10clouds/edx-platform,edx/edx-platform,B-MOOC/edx-platform,proversity-org/edx-platform,mahendra-r/edx-platform,longmen21/edx-platform,edx/edx-platform,don-github/edx-platform,wwj718/edx-platform,RPI-OPENEDX/edx-platform,martynovp/edx-platform,polimediaupv/edx-platform,teltek/edx-platform,mahendra-r/edx-platform,arifsetiawan/edx-platform,edry/edx-platform,a-parhom/edx-platform,rismalrv/edx-platform,defance/edx-platform,alexthered/kienhoc-platform,Lektorium-LLC/edx-platform,ubc/edx-platform,longmen21/edx-platform,Edraak/circleci-edx-platform,rismalrv/edx-platform,marcore/edx-platform,procangroup/edx-platform,UOMx/edx-platform,bitifirefly/edx-platform,adoosii/edx-platform,etzhou/edx-platform,etzhou/edx-platform,chauhanhardik/populo_2,vasyarv/edx-platform,chudaol/edx-platform,jonathan-beard/edx-platform,mushtaqak/edx-platform,tiagochiavericosta/edx-platform,jzoldak/edx-platform,shubhdev/edx-platform,Stanford-Online/edx-platform,ahmadio/edx-platform,etzhou/edx-platform,shurihell/testasia,halvertoluke/edx-platform,jamesblunt/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress,eduNEXT/edunext-platform,mitocw/edx-platform,chauhanhardik/populo_2,Livit/Livit.Learn.EdX,cpennington/edx-platform,shashank971/edx-platform,pabloborrego93/edx-platform,naresh21/synergetics-edx-platform,Edraak/edx-platform,mbareta/edx-platform-ft,adoosii/edx-platform,Edraak/edraak-platform,doismellburning/edx-platform,iivic/BoiseStateX,mushtaqak/edx-platform,shashank971/edx-platform,kmoocdev2/edx-platform,JioEducation/edx-platform,Ayub-Khan/edx-platform,xingyepei/edx-platform,xinjiguaike/edx-platform,zerobatu/edx-platform,10clouds/edx-platform,kmoocdev2/edx-platform,xuxiao19910803/edx,BehavioralInsightsTeam/edx-platform,longmen21/edx-platform,shabab12/edx-platform,jazkarta/edx-platform,mitocw/edx-platform,jolyonb/edx-platform,msegado/edx-platform,hastexo/edx-platform,Semi-global/edx-platform,jazztpt/edx-platform,zadgroup/edx-platform,cognitiveclass/edx-platform,Edraak/edx-platform,teltek/edx-platform,waheedahmed/edx-platform,knehez/edx-platform,defance/edx-platform,ubc/edx-platform,vikas1885/test1,franosincic/edx-platform,zuba
ir-arbi/edx-platform,zhenzhai/edx-platform,romain-li/edx-platform,openfun/edx-platform,shabab12/edx-platform,martynovp/edx-platform,franosincic/edx-platform,fly19890211/edx-platform,ak2703/edx-platform,stvstnfrd/edx-platform,chauhanhardik/populo,etzhou/edx-platform,appliedx/edx-platform,stvstnfrd/edx-platform,knehez/edx-platform,appsembler/edx-platform,arifsetiawan/edx-platform,Semi-global/edx-platform,analyseuc3m/ANALYSE-v1,defance/edx-platform,deepsrijit1105/edx-platform,romain-li/edx-platform,leansoft/edx-platform,xinjiguaike/edx-platform,mitocw/edx-platform,pepeportela/edx-platform,chrisndodge/edx-platform,kmoocdev2/edx-platform,Stanford-Online/edx-platform,eduNEXT/edunext-platform,arifsetiawan/edx-platform,AkA84/edx-platform,playm2mboy/edx-platform,MakeHer/edx-platform,don-github/edx-platform,franosincic/edx-platform,UOMx/edx-platform,angelapper/edx-platform,pomegranited/edx-platform,Stanford-Online/edx-platform,rismalrv/edx-platform,zhenzhai/edx-platform,zubair-arbi/edx-platform,BehavioralInsightsTeam/edx-platform,longmen21/edx-platform,chauhanhardik/populo_2,tiagochiavericosta/edx-platform,CourseTalk/edx-platform,openfun/edx-platform,jjmiranda/edx-platform,SivilTaram/edx-platform,CourseTalk/edx-platform,chand3040/cloud_that,Lektorium-LLC/edx-platform,ferabra/edx-platform,ferabra/edx-platform,CredoReference/edx-platform,adoosii/edx-platform,naresh21/synergetics-edx-platform,bitifirefly/edx-platform,simbs/edx-platform,shubhdev/edx-platform,openfun/edx-platform,Lektorium-LLC/edx-platform,EDUlib/edx-platform,hamzehd/edx-platform,nikolas/edx-platform,tanmaykm/edx-platform,shabab12/edx-platform,vikas1885/test1,cognitiveclass/edx-platform,knehez/edx-platform,edry/edx-platform,zerobatu/edx-platform,ovnicraft/edx-platform,msegado/edx-platform,cecep-edu/edx-platform,10clouds/edx-platform,eduNEXT/edx-platform,bigdatauniversity/edx-platform,solashirai/edx-platform,4eek/edx-platform,mahendra-r/edx-platform,chauhanhardik/populo_2,louyihua/edx-platform,cognitiveclass/edx-platform,xuxiao19910803/edx,appliedx/edx-platform,Softmotions/edx-platform,devs1991/test_edx_docmode,cecep-edu/edx-platform,J861449197/edx-platform,Softmotions/edx-platform,kxliugang/edx-platform,waheedahmed/edx-platform,nanolearningllc/edx-platform-cypress,don-github/edx-platform,inares/edx-platform,kmoocdev2/edx-platform,mcgachey/edx-platform,solashirai/edx-platform,gymnasium/edx-platform,xingyepei/edx-platform,jamiefolsom/edx-platform,Edraak/circleci-edx-platform,Endika/edx-platform,jazkarta/edx-platform,Livit/Livit.Learn.EdX,nanolearningllc/edx-platform-cypress-2,jzoldak/edx-platform,zhenzhai/edx-platform,shubhdev/edxOnBaadal,jazztpt/edx-platform,longmen21/edx-platform,ampax/edx-platform,zerobatu/edx-platform,raccoongang/edx-platform,mushtaqak/edx-platform,waheedahmed/edx-platform,shurihell/testasia,TeachAtTUM/edx-platform,amir-qayyum-khan/edx-platform,appliedx/edx-platform,Endika/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx,Edraak/circleci-edx-platform,louyihua/edx-platform,shashank971/edx-platform,raccoongang/edx-platform,UOMx/edx-platform,zubair-arbi/edx-platform,jolyonb/edx-platform,TeachAtTUM/edx-platform,halvertoluke/edx-platform,IndonesiaX/edx-platform,xuxiao19910803/edx-platform,appliedx/edx-platform,JCBarahona/edX,AkA84/edx-platform,gsehub/edx-platform,bitifirefly/edx-platform,jamiefolsom/edx-platform,cpennington/edx-platform,IONISx/edx-platform,shubhdev/edx-platform,pomegranited/edx-platform,ampax/edx-platform,Ayub-Khan/edx-platform,motion2015/edx-platform
|
Add a11y test for lms student dashboard
|
"""
Accessibility tests for LMS dashboard page.
Run just this test with:
SELENIUM_BROWSER=phantomjs paver test_bokchoy -d accessibility -t test_lms_dashboard_axs.py
"""
from ..tests.lms.test_lms_dashboard import LmsDashboardPageTest
class LmsDashboardAxsTest(LmsDashboardPageTest):
"""
Class to test lms student dashboard accessibility.
"""
def test_dashboard_course_listings_axs(self):
"""
Test the accessibility of the course listings
"""
course_listings = self.dashboard_page.get_course_listings()
self.assertEqual(len(course_listings), 1)
report = self.dashboard_page.do_axs_audit()
# There was one page in this session
self.assertEqual(1, len(report))
result = report[0]
# Verify that this page has no accessibility errors.
self.assertEqual(0, len(result.errors))
# Verify that this page currently has 2 accessibility warnings.
self.assertEqual(2, len(result.warnings))
# And that these are the warnings that the page currently gives.
for warning in result.warnings:
self.assertTrue(
warning.startswith(('Warning: AX_FOCUS_01', 'Warning: AX_COLOR_01',)),
msg="Unexpected warning: {}".format(warning))
|
<commit_before><commit_msg>Add a11y test for lms student dashboard<commit_after>
|
"""
Accessibility tests for LMS dashboard page.
Run just this test with:
SELENIUM_BROWSER=phantomjs paver test_bokchoy -d accessibility -t test_lms_dashboard_axs.py
"""
from ..tests.lms.test_lms_dashboard import LmsDashboardPageTest
class LmsDashboardAxsTest(LmsDashboardPageTest):
"""
Class to test lms student dashboard accessibility.
"""
def test_dashboard_course_listings_axs(self):
"""
Test the accessibility of the course listings
"""
course_listings = self.dashboard_page.get_course_listings()
self.assertEqual(len(course_listings), 1)
report = self.dashboard_page.do_axs_audit()
# There was one page in this session
self.assertEqual(1, len(report))
result = report[0]
# Verify that this page has no accessibility errors.
self.assertEqual(0, len(result.errors))
# Verify that this page currently has 2 accessibility warnings.
self.assertEqual(2, len(result.warnings))
# And that these are the warnings that the page currently gives.
for warning in result.warnings:
self.assertTrue(
warning.startswith(('Warning: AX_FOCUS_01', 'Warning: AX_COLOR_01',)),
msg="Unexpected warning: {}".format(warning))
|
Add a11y test for lms student dashboard"""
Accessibility tests for LMS dashboard page.
Run just this test with:
SELENIUM_BROWSER=phantomjs paver test_bokchoy -d accessibility -t test_lms_dashboard_axs.py
"""
from ..tests.lms.test_lms_dashboard import LmsDashboardPageTest
class LmsDashboardAxsTest(LmsDashboardPageTest):
"""
Class to test lms student dashboard accessibility.
"""
def test_dashboard_course_listings_axs(self):
"""
Test the accessibility of the course listings
"""
course_listings = self.dashboard_page.get_course_listings()
self.assertEqual(len(course_listings), 1)
report = self.dashboard_page.do_axs_audit()
# There was one page in this session
self.assertEqual(1, len(report))
result = report[0]
# Verify that this page has no accessibility errors.
self.assertEqual(0, len(result.errors))
# Verify that this page currently has 2 accessibility warnings.
self.assertEqual(2, len(result.warnings))
# And that these are the warnings that the page currently gives.
for warning in result.warnings:
self.assertTrue(
warning.startswith(('Warning: AX_FOCUS_01', 'Warning: AX_COLOR_01',)),
msg="Unexpected warning: {}".format(warning))
|
<commit_before><commit_msg>Add a11y test for lms student dashboard<commit_after>"""
Accessibility tests for LMS dashboard page.
Run just this test with:
SELENIUM_BROWSER=phantomjs paver test_bokchoy -d accessibility -t test_lms_dashboard_axs.py
"""
from ..tests.lms.test_lms_dashboard import LmsDashboardPageTest
class LmsDashboardAxsTest(LmsDashboardPageTest):
"""
Class to test lms student dashboard accessibility.
"""
def test_dashboard_course_listings_axs(self):
"""
Test the accessibility of the course listings
"""
course_listings = self.dashboard_page.get_course_listings()
self.assertEqual(len(course_listings), 1)
report = self.dashboard_page.do_axs_audit()
# There was one page in this session
self.assertEqual(1, len(report))
result = report[0]
# Verify that this page has no accessibility errors.
self.assertEqual(0, len(result.errors))
# Verify that this page currently has 2 accessibility warnings.
self.assertEqual(2, len(result.warnings))
# And that these are the warnings that the page currently gives.
for warning in result.warnings:
self.assertTrue(
warning.startswith(('Warning: AX_FOCUS_01', 'Warning: AX_COLOR_01',)),
msg="Unexpected warning: {}".format(warning))
|
|
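The test above hard-codes the expected audit outcome: zero errors and exactly two warnings with known prefixes. If more pages adopt the same check, the assertions can be pulled into a small helper. The sketch below is one assumed way to factor it; the helper name and signature are illustrative, while the result object's .errors and .warnings attributes mirror the report used above:

def assert_axs_result_clean(test, result, allowed_warning_prefixes):
    # Fail on any accessibility error, and on any warning whose text does
    # not start with one of the whitelisted prefixes.
    test.assertEqual(0, len(result.errors),
                     msg="Unexpected a11y errors: {}".format(result.errors))
    for warning in result.warnings:
        test.assertTrue(
            warning.startswith(tuple(allowed_warning_prefixes)),
            msg="Unexpected warning: {}".format(warning))

# The dashboard test could then collapse its checks to:
# assert_axs_result_clean(self, report[0],
#     ('Warning: AX_FOCUS_01', 'Warning: AX_COLOR_01'))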
a38b49d40a10efe27e46a3c703bef4db1d24a3bb
|
ec2/launch_instance.py
|
ec2/launch_instance.py
|
#!/usr/bin/env python
'''
Start new ec2 instance with open ssh port
'''
__author__ = "Leonid Vasilyev, <vsleonid@gmail.com>"
import json
import os
import sys
import time
from datetime import datetime
import boto
import boto.ec2
# based on http://cloud-images.ubuntu.com/releases/precise/release/
INSTANCE_CONFIG = {
"ami": "ami-14907e63", # Ubuntu 12.04.3 LTS eu-west-1 64-bit instance
"region": "eu-west-1",
"type": "m1.small",
}
def main(config_path, name_prefix, tag):
with open(config_path) as f:
config = json.load(f)
ec2 = boto.ec2.connect_to_region(
INSTANCE_CONFIG['region'],
aws_access_key_id=config['access_key_id'],
aws_secret_access_key=config['secret_access_key'])
name = name_prefix + "-" + datetime.utcnow().isoformat()
# Assume that ssh key is uploaded
group = ec2.create_security_group(
name,
'A group that allows SSH access')
group.authorize('tcp', 22, 22, "0.0.0.0/0")
reservation = ec2.run_instances(
INSTANCE_CONFIG['ami'],
key_name=os.path.basename(config['certificate_path']).split(".")[0],
instance_type=INSTANCE_CONFIG['type'],
security_groups=[name])
# Find the actual Instance object inside the Reservation object
# returned by EC2.
instance = reservation.instances[0]
# The instance has been launched but it's not yet up and
# running. Let's wait for its state to change to 'running'.
print 'waiting for instance'
while instance.state != 'running':
print '.',
time.sleep(1)
instance.update()
print 'done'
instance.add_tag(tag)
print "DoNe! To connect use:"
print "ssh -i {} ubuntu@{}".format(
config['certificate_path'],
instance.public_dns_name
)
if __name__ == "__main__":
if len(sys.argv) != 4:
sys.stderr.write("Usage:\n {} <config-path> <name-prefix> <tag>\n".format(sys.argv[0]))
sys.exit(1)
main(*sys.argv[1:])
|
Add script to launch ec2 instance
|
Add script to launch ec2 instance
|
Python
|
apache-2.0
|
lvsl/go-to
|
Add script to launch ec2 instance
|
#!/usr/bin/env python
'''
Start new ec2 instance with open ssh port
'''
__author__ = "Leonid Vasilyev, <vsleonid@gmail.com>"
import json
import os
import sys
import time
from datetime import datetime
import boto
import boto.ec2
# based on http://cloud-images.ubuntu.com/releases/precise/release/
INSTANCE_CONFIG = {
"ami": "ami-14907e63", # Ubuntu 12.04.3 LTS eu-west-1 64-bit instance
"region": "eu-west-1",
"type": "m1.small",
}
def main(config_path, name_prefix, tag):
with open(config_path) as f:
config = json.load(f)
ec2 = boto.ec2.connect_to_region(
INSTANCE_CONFIG['region'],
aws_access_key_id=config['access_key_id'],
aws_secret_access_key=config['secret_access_key'])
name = name_prefix + "-" + datetime.utcnow().isoformat()
# Assume that ssh key is uploaded
group = ec2.create_security_group(
name,
'A group that allows SSH access')
group.authorize('tcp', 22, 22, "0.0.0.0/0")
reservation = ec2.run_instances(
INSTANCE_CONFIG['ami'],
key_name=os.path.basename(config['certificate_path']).split(".")[0],
instance_type=INSTANCE_CONFIG['type'],
security_groups=[name])
# Find the actual Instance object inside the Reservation object
# returned by EC2.
instance = reservation.instances[0]
# The instance has been launched but it's not yet up and
# running. Let's wait for its state to change to 'running'.
print 'waiting for instance'
while instance.state != 'running':
print '.',
time.sleep(1)
instance.update()
print 'done'
instance.add_tag(tag)
print "DoNe! To connect use:"
print "ssh -i {} ubuntu@{}".format(
config['certificate_path'],
instance.public_dns_name
)
if __name__ == "__main__":
if len(sys.argv) != 4:
sys.stderr.write("Usage:\n {} <config-path> <name-prefix> <tag>\n".format(sys.argv[0]))
sys.exit(1)
main(*sys.argv[1:])
|
<commit_before><commit_msg>Add script to launch ec2 instance<commit_after>
|
#!/usr/bin/env python
'''
Start new ec2 instance with open ssh port
'''
__author__ = "Leonid Vasilyev, <vsleonid@gmail.com>"
import json
import os
import sys
import time
from datetime import datetime
import boto
import boto.ec2
# based on http://cloud-images.ubuntu.com/releases/precise/release/
INSTANCE_CONFIG = {
"ami": "ami-14907e63", # Ubuntu 12.04.3 LTS eu-west-1 64-bit instance
"region": "eu-west-1",
"type": "m1.small",
}
def main(config_path, name_prefix, tag):
with open(config_path) as f:
config = json.load(f)
ec2 = boto.ec2.connect_to_region(
INSTANCE_CONFIG['region'],
aws_access_key_id=config['access_key_id'],
aws_secret_access_key=config['secret_access_key'])
name = name_prefix + "-" + datetime.utcnow().isoformat()
# Assume that ssh key is uploaded
group = ec2.create_security_group(
name,
'A group that allows SSH access')
group.authorize('tcp', 22, 22, "0.0.0.0/0")
reservation = ec2.run_instances(
INSTANCE_CONFIG['ami'],
key_name=os.path.basename(config['certificate_path']).split(".")[0],
instance_type=INSTANCE_CONFIG['type'],
security_groups=[name])
# Find the actual Instance object inside the Reservation object
# returned by EC2.
instance = reservation.instances[0]
# The instance has been launched but it's not yet up and
# running. Let's wait for its state to change to 'running'.
print 'waiting for instance'
while instance.state != 'running':
print '.',
time.sleep(1)
instance.update()
print 'done'
instance.add_tag(tag)
print "DoNe! To connect use:"
print "ssh -i {} ubuntu@{}".format(
config['certificate_path'],
instance.public_dns_name
)
if __name__ == "__main__":
if len(sys.argv) != 4:
sys.stderr.write("Usage:\n {} <config-path> <name-prefix> <tag>\n".format(sys.argv[0]))
sys.exit(1)
main(*sys.argv[1:])
|
Add script to launch ec2 instance#!/usr/bin/env python
'''
Start new ec2 instance with open ssh port
'''
__author__ = "Leonid Vasilyev, <vsleonid@gmail.com>"
import json
import os
import sys
import time
from datetime import datetime
import boto
import boto.ec2
# based on http://cloud-images.ubuntu.com/releases/precise/release/
INSTANCE_CONFIG = {
"ami": "ami-14907e63", # Ubuntu 12.04.3 LTS eu-west-1 64-bit instance
"region": "eu-west-1",
"type": "m1.small",
}
def main(config_path, name_prefix, tag):
with open(config_path) as f:
config = json.load(f)
ec2 = boto.ec2.connect_to_region(
INSTANCE_CONFIG['region'],
aws_access_key_id=config['access_key_id'],
aws_secret_access_key=config['secret_access_key'])
name = name_prefix + "-" + datetime.utcnow().isoformat()
# Assume that ssh key is uploaded
group = ec2.create_security_group(
name,
'A group that allows SSH access')
group.authorize('tcp', 22, 22, "0.0.0.0/0")
reservation = ec2.run_instances(
INSTANCE_CONFIG['ami'],
key_name=os.path.basename(config['certificate_path']).split(".")[0],
instance_type=INSTANCE_CONFIG['type'],
security_groups=[name])
# Find the actual Instance object inside the Reservation object
# returned by EC2.
instance = reservation.instances[0]
# The instance has been launched but it's not yet up and
# running. Let's wait for its state to change to 'running'.
print 'waiting for instance'
while instance.state != 'running':
print '.',
time.sleep(1)
instance.update()
print 'done'
instance.add_tag(tag)
print "DoNe! To connect use:"
print "ssh -i {} ubuntu@{}".format(
config['certificate_path'],
instance.public_dns_name
)
if __name__ == "__main__":
if len(sys.argv) != 4:
sys.stderr.write("Usage:\n {} <config-path> <name-prefix> <tag>\n".format(sys.argv[0]))
sys.exit(1)
main(*sys.argv[1:])
|
<commit_before><commit_msg>Add script to launch ec2 instance<commit_after>#!/usr/bin/env python
'''
Start new ec2 instance with open ssh port
'''
__author__ = "Leonid Vasilyev, <vsleonid@gmail.com>"
import json
import os
import sys
import time
from datetime import datetime
import boto
import boto.ec2
# based on http://cloud-images.ubuntu.com/releases/precise/release/
INSTANCE_CONFIG = {
"ami": "ami-14907e63", # Ubuntu 12.04.3 LTS eu-west-1 64-bit instance
"region": "eu-west-1",
"type": "m1.small",
}
def main(config_path, name_prefix, tag):
with open(config_path) as f:
config = json.load(f)
ec2 = boto.ec2.connect_to_region(
INSTANCE_CONFIG['region'],
aws_access_key_id=config['access_key_id'],
aws_secret_access_key=config['secret_access_key'])
name = name_prefix + "-" + datetime.utcnow().isoformat()
# Assume that ssh key is uploaded
group = ec2.create_security_group(
name,
'A group that allows SSH access')
group.authorize('tcp', 22, 22, "0.0.0.0/0")
reservation = ec2.run_instances(
INSTANCE_CONFIG['ami'],
key_name=os.path.basename(config['certificate_path']).split(".")[0],
instance_type=INSTANCE_CONFIG['type'],
security_groups=[name])
# Find the actual Instance object inside the Reservation object
# returned by EC2.
instance = reservation.instances[0]
# The instance has been launched but it's not yet up and
# running. Let's wait for its state to change to 'running'.
print 'waiting for instance'
while instance.state != 'running':
print '.',
time.sleep(1)
instance.update()
print 'done'
instance.add_tag(tag)
print "DoNe! To connect use:"
print "ssh -i {} ubuntu@{}".format(
config['certificate_path'],
instance.public_dns_name
)
if __name__ == "__main__":
if len(sys.argv) != 4:
sys.stderr.write("Usage:\n {} <config-path> <name-prefix> <tag>\n".format(sys.argv[0]))
sys.exit(1)
main(*sys.argv[1:])
|
|
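The launcher above targets the legacy boto API, with Python 2 print statements and a hand-rolled polling loop. For comparison, here is a minimal boto3 sketch of the same flow. It assumes credentials come from the environment and that the AMI and key pair already exist; every literal value is illustrative rather than taken from the record, and the waiter replaces the while-loop on instance.state.

#!/usr/bin/env python3
"""Sketch: launch one EC2 instance with boto3 and wait until it runs."""
import boto3

def launch(region, ami, instance_type, key_name, tag_key, tag_value):
    ec2 = boto3.resource('ec2', region_name=region)
    instance = ec2.create_instances(
        ImageId=ami,
        InstanceType=instance_type,
        KeyName=key_name,
        MinCount=1,
        MaxCount=1,
    )[0]
    instance.wait_until_running()  # replaces the manual state-polling loop
    instance.reload()              # refresh public_dns_name and friends
    instance.create_tags(Tags=[{'Key': tag_key, 'Value': tag_value}])
    return instance.public_dns_name

if __name__ == '__main__':
    print(launch('eu-west-1', 'ami-14907e63', 't3.micro',
                 'my-key', 'purpose', 'demo'))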
b1371dc1501da997de7777f32918b920de3a4f3f
|
state_machine.py
|
state_machine.py
|
from enum import Enum
class match_state(Enum):
state_P1_win = 0
state_P2_win = 1
state_tie = 2
state_betting_open = 3
state_regular_match = 4
state_invalid = 5
def function_P1_win():
pass
def function_P2_win():
pass
def function_tie():
pass
def function_betting_open():
pass
def function_regular_match():
pass
|
Add skeleton code for state machine
|
Add skeleton code for state machine
State machine module will be implemented to allow for better unit
testing. We can better test each individual match mode using this
method.
|
Python
|
mit
|
Jacobinski/SaltBot
|
Add skeleton code for state machine
State machine module will be implemented to allow for better unit
testing. We can better test each individual match mode using this
method.
|
from enum import Enum
class match_state(Enum):
state_P1_win = 0
state_P2_win = 1
state_tie = 2
state_betting_open = 3
state_regular_match = 4
state_invalid = 5
def function_P1_win():
pass
def function_P2_win():
pass
def function_tie():
pass
def function_betting_open():
pass
def function_regular_match():
pass
|
<commit_before><commit_msg>Add skeleton code for state machine
State machine module will be implemented to allow for better unit
testing. We can better test each individual match mode using this
method.<commit_after>
|
from enum import Enum
class match_state(Enum):
state_P1_win = 0
state_P2_win = 1
state_tie = 2
state_betting_open = 3
state_regular_match = 4
state_invalid = 5
def function_P1_win():
pass
def function_P2_win():
pass
def function_tie():
pass
def function_betting_open():
pass
def function_regular_match():
pass
|
Add skeleton code for state machine
State machine module will be implemented to allow for better unit
testing. We can better test each individual match mode using this
method.from enum import Enum
class match_state(Enum):
state_P1_win = 0
state_P2_win = 1
state_tie = 2
state_betting_open = 3
state_regular_match = 4
state_invalid = 5
def function_P1_win():
pass
def function_P2_win():
pass
def function_tie():
pass
def function_betting_open():
pass
def function_regular_match():
pass
|
<commit_before><commit_msg>Add skeleton code for state machine
State machine module will be implemented to allow for better unit
testing. We can better test each individual match mode using this
method.<commit_after>from enum import Enum
class match_state(Enum):
state_P1_win = 0
state_P2_win = 1
state_tie = 2
state_betting_open = 3
state_regular_match = 4
state_invalid = 5
def function_P1_win():
pass
def function_P2_win():
pass
def function_tie():
pass
def function_betting_open():
pass
def function_regular_match():
pass
|
|
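The skeleton above enumerates the states and stubs one handler per state, but stops short of connecting them. A common next step, sketched here as an assumption about where the module is headed rather than its actual design, is a dispatch table keyed by state; that table is what makes each match mode testable in isolation:

from enum import Enum

class MatchState(Enum):
    P1_WIN = 0
    P2_WIN = 1
    TIE = 2
    BETTING_OPEN = 3
    REGULAR_MATCH = 4
    INVALID = 5

def handle_p1_win():
    return 'settle bets for P1'

def handle_betting_open():
    return 'accept new bets'

# One handler per state; a unit test can call a handler directly or
# drive the whole table through step() with a single state value.
HANDLERS = {
    MatchState.P1_WIN: handle_p1_win,
    MatchState.BETTING_OPEN: handle_betting_open,
}

def step(state):
    handler = HANDLERS.get(state)
    if handler is None:
        raise ValueError('no handler registered for %s' % state)
    return handler()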
bbbb707b842d7069e77ae9dc99de246caf314a44
|
openstack_dashboard/enabled/_1610_orchestration_panel_group.py
|
openstack_dashboard/enabled/_1610_orchestration_panel_group.py
|
from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Ochestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
|
from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Orchestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
|
Fix the misspelling 'Ochestration' to 'Orchestration'
|
Fix the misspelling 'Ochestration' to 'Orchestration'
The string 'Ochestration' is misspelled.
This patch fixes it.
Change-Id: I7723354d23f31f3320439ae025c561082bb95115
|
Python
|
apache-2.0
|
NeCTAR-RC/horizon,NeCTAR-RC/horizon,Tesora/tesora-horizon,openstack/horizon,redhat-openstack/horizon,BiznetGIO/horizon,karthik-suresh/horizon,anthonydillon/horizon,yeming233/horizon,redhat-cip/horizon,tqtran7/horizon,endorphinl/horizon-fork,gerrive/horizon,Dark-Hacker/horizon,takeshineshiro/horizon,philoniare/horizon,davidcusatis/horizon,idjaw/horizon,philoniare/horizon,Mirantis/mos-horizon,endorphinl/horizon-fork,noironetworks/horizon,davidcusatis/horizon,sandvine/horizon,doug-fish/horizon,endorphinl/horizon,saydulk/horizon,philoniare/horizon,FNST-OpenStack/horizon,django-leonardo/horizon,Tesora/tesora-horizon,Tesora/tesora-horizon,mdavid/horizon,coreycb/horizon,tqtran7/horizon,idjaw/horizon,bigswitch/horizon,ChameleonCloud/horizon,django-leonardo/horizon,mdavid/horizon,dan1/horizon-x509,BiznetGIO/horizon,anthonydillon/horizon,sandvine/horizon,saydulk/horizon,gerrive/horizon,redhat-openstack/horizon,sandvine/horizon,ChameleonCloud/horizon,ChameleonCloud/horizon,endorphinl/horizon,karthik-suresh/horizon,watonyweng/horizon,damien-dg/horizon,mdavid/horizon,philoniare/horizon,vladryk/horizon,maestro-hybrid-cloud/horizon,damien-dg/horizon,BiznetGIO/horizon,damien-dg/horizon,maestro-hybrid-cloud/horizon,davidcusatis/horizon,Dark-Hacker/horizon,wolverineav/horizon,yeming233/horizon,maestro-hybrid-cloud/horizon,bigswitch/horizon,NeCTAR-RC/horizon,FNST-OpenStack/horizon,Dark-Hacker/horizon,saydulk/horizon,saydulk/horizon,ChameleonCloud/horizon,openstack/horizon,anthonydillon/horizon,redhat-openstack/horizon,Dark-Hacker/horizon,takeshineshiro/horizon,openstack/horizon,idjaw/horizon,yeming233/horizon,Mirantis/mos-horizon,davidcusatis/horizon,openstack/horizon,doug-fish/horizon,vladryk/horizon,coreycb/horizon,bigswitch/horizon,dan1/horizon-x509,watonyweng/horizon,anthonydillon/horizon,bac/horizon,coreycb/horizon,redhat-cip/horizon,BiznetGIO/horizon,karthik-suresh/horizon,mdavid/horizon,bac/horizon,wolverineav/horizon,bigswitch/horizon,wolverineav/horizon,watonyweng/horizon,gerrive/horizon,redhat-openstack/horizon,FNST-OpenStack/horizon,redhat-cip/horizon,django-leonardo/horizon,noironetworks/horizon,yeming233/horizon,coreycb/horizon,maestro-hybrid-cloud/horizon,takeshineshiro/horizon,Mirantis/mos-horizon,endorphinl/horizon,Tesora/tesora-horizon,tqtran7/horizon,watonyweng/horizon,tqtran7/horizon,wolverineav/horizon,bac/horizon,doug-fish/horizon,Mirantis/mos-horizon,gerrive/horizon,vladryk/horizon,bac/horizon,damien-dg/horizon,redhat-cip/horizon,FNST-OpenStack/horizon,NeCTAR-RC/horizon,endorphinl/horizon,takeshineshiro/horizon,idjaw/horizon,noironetworks/horizon,dan1/horizon-x509,endorphinl/horizon-fork,noironetworks/horizon,karthik-suresh/horizon,vladryk/horizon,dan1/horizon-x509,sandvine/horizon,endorphinl/horizon-fork,django-leonardo/horizon,doug-fish/horizon
|
from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Ochestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
Fix the misspelling 'Ochestration' to 'Orchestration'
The string 'Ochestration' is misspelled.
This patch fixes it.
Change-Id: I7723354d23f31f3320439ae025c561082bb95115
|
from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Orchestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
|
<commit_before>from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Ochestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
<commit_msg>Fix the misspelling 'Ochestration' to 'Orchestration'
The string 'Ochestration' is misspelled.
This patch fixes it.
Change-Id: I7723354d23f31f3320439ae025c561082bb95115<commit_after>
|
from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Orchestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
|
from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Ochestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
Fix the misspelling 'Ochestration' to 'Orchestration'
The string 'Ochestration' is misspelled.
This patch fixes it.
Change-Id: I7723354d23f31f3320439ae025c561082bb95115from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Orchestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
|
<commit_before>from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Ochestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
<commit_msg>Fix the misspelling 'Ochestration' to 'Orchestration'
The string 'Ochestration' is misspelled.
This patch fixes it.
Change-Id: I7723354d23f31f3320439ae025c561082bb95115<commit_after>from django.utils.translation import ugettext_lazy as _
# The slug of the panel group to be added to HORIZON_CONFIG. Required.
PANEL_GROUP = 'orchestration'
# The display name of the PANEL_GROUP. Required.
PANEL_GROUP_NAME = _('Orchestration')
# The slug of the dashboard the PANEL_GROUP associated with. Required.
PANEL_GROUP_DASHBOARD = 'project'
|
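The enabled-file above only declares the panel group; individual panels attach to it through sibling files in the same directory. The sketch below shows the shape of such a companion file under Horizon's pluggable-settings convention. The filename, panel slug, and dotted class path are placeholders, not values from this record:

# Hypothetical sibling file, e.g. _1620_project_stacks_panel.py
# The slug of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'stacks'
# The slug of the dashboard the PANEL is associated with. Required.
PANEL_DASHBOARD = 'project'
# The slug of the panel group the PANEL is associated with.
PANEL_GROUP = 'orchestration'
# Python panel class of the PANEL to be added.
ADD_PANEL = 'openstack_dashboard.dashboards.project.stacks.panel.Stacks'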
14147d9db9760b378b0cc924fcecbe11a75ebda0
|
examples/bot_vs_bot.py
|
examples/bot_vs_bot.py
|
import sc2
from sc2 import Race
from sc2.player import Bot
from zerg_rush import ZergRushBot
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Zerg, ZergRushBot()),
Bot(Race.Zerg, ZergRushBot())
], realtime=False)
if __name__ == '__main__':
main()
|
Add bot vs bot example
|
Add bot vs bot example
|
Python
|
mit
|
Dentosal/python-sc2
|
Add bot vs bot example
|
import sc2
from sc2 import Race
from sc2.player import Bot
from zerg_rush import ZergRushBot
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Zerg, ZergRushBot()),
Bot(Race.Zerg, ZergRushBot())
], realtime=False)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add bot vs bot example<commit_after>
|
import sc2
from sc2 import Race
from sc2.player import Bot
from zerg_rush import ZergRushBot
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Zerg, ZergRushBot()),
Bot(Race.Zerg, ZergRushBot())
], realtime=False)
if __name__ == '__main__':
main()
|
Add bot vs bot exampleimport sc2
from sc2 import Race
from sc2.player import Bot
from zerg_rush import ZergRushBot
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Zerg, ZergRushBot()),
Bot(Race.Zerg, ZergRushBot())
], realtime=False)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add bot vs bot example<commit_after>import sc2
from sc2 import Race
from sc2.player import Bot
from zerg_rush import ZergRushBot
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Zerg, ZergRushBot()),
Bot(Race.Zerg, ZergRushBot())
], realtime=False)
if __name__ == '__main__':
main()
|
|
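The example above imports ZergRushBot from a sibling module that is not part of this record. A minimal stand-in with the same interface keeps the example self-contained; this sketch assumes python-sc2's BotAI base class and substitutes trivial placeholder logic for an actual rush:

import sc2

class ZergRushBot(sc2.BotAI):
    async def on_step(self, iteration):
        # Placeholder behavior: greet once at game start instead of rushing.
        if iteration == 0:
            await self.chat_send("(glhf)")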
cef547a9f05fc39ee667606b6561ea3f106a7018
|
spacy/tests/serialize/test_serialize_tensorizer.py
|
spacy/tests/serialize/test_serialize_tensorizer.py
|
# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
from ...pipeline import TokenVectorEncoder as Tensorizer
import pytest
def test_serialize_tensorizer_roundtrip_bytes(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
tensorizer_b = tensorizer.to_bytes()
new_tensorizer = Tensorizer(en_vocab).from_bytes(tensorizer_b)
assert new_tensorizer.to_bytes() == tensorizer_b
def test_serialize_tensorizer_roundtrip_disk(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
with make_tempdir() as d:
file_path = d / 'tensorizer'
tensorizer.to_disk(file_path)
tensorizer_d = Tensorizer(en_vocab).from_disk(file_path)
assert tensorizer.to_bytes() == tensorizer_d.to_bytes()
|
Add serialization tests for tensorizer
|
Add serialization tests for tensorizer
|
Python
|
mit
|
spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy
|
Add serialization tests for tensorizer
|
# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
from ...pipeline import TokenVectorEncoder as Tensorizer
import pytest
def test_serialize_tensorizer_roundtrip_bytes(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
tensorizer_b = tensorizer.to_bytes()
new_tensorizer = Tensorizer(en_vocab).from_bytes(tensorizer_b)
assert new_tensorizer.to_bytes() == tensorizer_b
def test_serialize_tensorizer_roundtrip_disk(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
with make_tempdir() as d:
file_path = d / 'tensorizer'
tensorizer.to_disk(file_path)
tensorizer_d = Tensorizer(en_vocab).from_disk(file_path)
assert tensorizer.to_bytes() == tensorizer_d.to_bytes()
|
<commit_before><commit_msg>Add serialization tests for tensorizer<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
from ...pipeline import TokenVectorEncoder as Tensorizer
import pytest
def test_serialize_tensorizer_roundtrip_bytes(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
tensorizer_b = tensorizer.to_bytes()
new_tensorizer = Tensorizer(en_vocab).from_bytes(tensorizer_b)
assert new_tensorizer.to_bytes() == tensorizer_b
def test_serialize_tensorizer_roundtrip_disk(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
with make_tempdir() as d:
file_path = d / 'tensorizer'
tensorizer.to_disk(file_path)
tensorizer_d = Tensorizer(en_vocab).from_disk(file_path)
assert tensorizer.to_bytes() == tensorizer_d.to_bytes()
|
Add serialization tests for tensorizer# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
from ...pipeline import TokenVectorEncoder as Tensorizer
import pytest
def test_serialize_tensorizer_roundtrip_bytes(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
tensorizer_b = tensorizer.to_bytes()
new_tensorizer = Tensorizer(en_vocab).from_bytes(tensorizer_b)
assert new_tensorizer.to_bytes() == tensorizer_b
def test_serialize_tensorizer_roundtrip_disk(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
with make_tempdir() as d:
file_path = d / 'tensorizer'
tensorizer.to_disk(file_path)
tensorizer_d = Tensorizer(en_vocab).from_disk(file_path)
assert tensorizer.to_bytes() == tensorizer_d.to_bytes()
|
<commit_before><commit_msg>Add serialization tests for tensorizer<commit_after># coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
from ...pipeline import TokenVectorEncoder as Tensorizer
import pytest
def test_serialize_tensorizer_roundtrip_bytes(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
tensorizer_b = tensorizer.to_bytes()
new_tensorizer = Tensorizer(en_vocab).from_bytes(tensorizer_b)
assert new_tensorizer.to_bytes() == tensorizer_b
def test_serialize_tensorizer_roundtrip_disk(en_vocab):
tensorizer = Tensorizer(en_vocab)
tensorizer.model = tensorizer.Model()
with make_tempdir() as d:
file_path = d / 'tensorizer'
tensorizer.to_disk(file_path)
tensorizer_d = Tensorizer(en_vocab).from_disk(file_path)
assert tensorizer.to_bytes() == tensorizer_d.to_bytes()
|
|
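Both round-trip tests above depend on a make_tempdir helper imported from the test utilities, which this record does not include. A functionally equivalent sketch (an assumption about its behavior, not spaCy's actual code) is a small context manager built on tempfile and shutil:

import shutil
import tempfile
from contextlib import contextmanager
from pathlib import Path

@contextmanager
def make_tempdir():
    # Yield a temporary directory as a Path (so `d / 'tensorizer'` works)
    # and remove the whole tree on exit, even if the test body raises.
    d = Path(tempfile.mkdtemp())
    try:
        yield d
    finally:
        shutil.rmtree(str(d))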
6ad2a6b20762ebeb842a0d75a4000fc6246d4770
|
two-factor-authentication/verify-webhook/verify-webhook.6.x.py
|
two-factor-authentication/verify-webhook/verify-webhook.6.x.py
|
import base64
import hashlib
import hmac
from django.conf import settings
from django.http import HttpResponseForbidden
from functools import wraps
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
def validate_authy_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(request, *args, **kwargs):
url = request.build_absolute_uri('?')
method = request.method
params = getattr(request, method).items()
sorted_params = urlencode(sorted(params))
# Read the nonce from the request
nonce = request.META['HTTP_X_AUTHY_SIGNATURE_NONCE']
# Concatenate all together, separated by '|'
data = '|'.join([nonce, method, url, sorted_params])
# Compute the signature
computed_dig = hmac.new(
settings.ACCOUNT_SECURITY_API_KEY.encode('utf-8'),
msg=data.encode('utf-8'),
digestmod=hashlib.sha256
).digest()
computed_sig = base64.b64encode(computed_dig).decode('utf-8')  # decode bytes so the comparison below also works on Python 3
sig = request.META['HTTP_X_AUTHY_SIGNATURE']
# Compare the message signature with your calculated signature
# Continue processing the request if it's valid, return a 403 error if
# it's not
if sig == computed_sig:
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
return decorated_function
|
Add verify 2FA webhook signature Python snippet
|
Add verify 2FA webhook signature Python snippet
|
Python
|
mit
|
TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets
|
Add verify 2FA webhook signature Python snippet
|
import base64
import hashlib
import hmac
from django.conf import settings
from django.http import HttpResponseForbidden
from functools import wraps
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
def validate_authy_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(request, *args, **kwargs):
url = request.build_absolute_uri('?')
method = request.method
params = getattr(request, method).items()
sorted_params = urlencode(sorted(params))
# Read the nonce from the request
nonce = request.META['HTTP_X_AUTHY_SIGNATURE_NONCE']
# Concatenate all together and separate by '|'
data = '|'.join([nonce, method, url, sorted_params])
# Compute the signature
computed_dig = hmac.new(
settings.ACCOUNT_SECURITY_API_KEY.encode('utf-8'),
msg=data.encode('utf-8'),
digestmod=hashlib.sha256
).digest()
computed_sig = base64.b64encode(computed_dig)
sig = request.META['HTTP_X_AUTHY_SIGNATURE']
# Compare the message signature with your calculated signature
# Continue processing the request if it's valid, return a 403 error if
# it's not
if sig == computed_sig:
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
return decorated_function
|
<commit_before><commit_msg>Add verify 2FA webhook signature Python snippet<commit_after>
|
import base64
import hashlib
import hmac
from django.conf import settings
from django.http import HttpResponseForbidden
from functools import wraps
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
def validate_authy_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(request, *args, **kwargs):
url = request.build_absolute_uri('?')
method = request.method
params = getattr(request, method).items()
sorted_params = urlencode(sorted(params))
# Read the nonce from the request
nonce = request.META['HTTP_X_AUTHY_SIGNATURE_NONCE']
# Concatenate all together and separate by '|'
data = '|'.join([nonce, method, url, sorted_params])
# Compute the signature
computed_dig = hmac.new(
settings.ACCOUNT_SECURITY_API_KEY.encode('utf-8'),
msg=data.encode('utf-8'),
digestmod=hashlib.sha256
).digest()
computed_sig = base64.b64encode(computed_dig)
sig = request.META['HTTP_X_AUTHY_SIGNATURE']
# Compare the message signature with your calculated signature
# Continue processing the request if it's valid, return a 403 error if
# it's not
if sig == computed_sig:
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
return decorated_function
|
Add verify 2FA webhook signature Python snippetimport base64
import hashlib
import hmac
from django.conf import settings
from django.http import HttpResponseForbidden
from functools import wraps
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
def validate_authy_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(request, *args, **kwargs):
url = request.build_absolute_uri('?')
method = request.method
params = getattr(request, method).items()
sorted_params = urlencode(sorted(params))
# Read the nonce from the request
nonce = request.META['HTTP_X_AUTHY_SIGNATURE_NONCE']
# Concatenate all together and separate by '|'
data = '|'.join([nonce, method, url, sorted_params])
# Compute the signature
computed_dig = hmac.new(
settings.ACCOUNT_SECURITY_API_KEY.encode('utf-8'),
msg=data.encode('utf-8'),
digestmod=hashlib.sha256
).digest()
computed_sig = base64.b64encode(computed_dig)
sig = request.META['HTTP_X_AUTHY_SIGNATURE']
# Compare the message signature with your calculated signature
# Continue processing the request if it's valid, return a 403 error if
# it's not
if sig == computed_sig:
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
return decorated_function
|
<commit_before><commit_msg>Add verify 2FA webhook signature Python snippet<commit_after>import base64
import hashlib
import hmac
from django.conf import settings
from django.http import HttpResponseForbidden
from functools import wraps
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
def validate_authy_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(request, *args, **kwargs):
url = request.build_absolute_uri('?')
method = request.method
params = getattr(request, method).items()
sorted_params = urlencode(sorted(params))
# Read the nonce from the request
nonce = request.META['HTTP_X_AUTHY_SIGNATURE_NONCE']
# Concatenate all together and separate by '|'
data = '|'.join([nonce, method, url, sorted_params])
# Compute the signature
computed_dig = hmac.new(
settings.ACCOUNT_SECURITY_API_KEY.encode('utf-8'),
msg=data.encode('utf-8'),
digestmod=hashlib.sha256
).digest()
computed_sig = base64.b64encode(computed_dig)
sig = request.META['HTTP_X_AUTHY_SIGNATURE']
# Compare the message signature with your calculated signature
# Continue processing the request if it's valid, return a 403 error if
# it's not
if sig == computed_sig:
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
return decorated_function
|
|
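One hardening note on the decorator above: comparing signatures with `==` can leak timing information. A sketch of a constant-time check, assuming `sig` arrives as `str` from `request.META` and `computed_sig` is the `bytes` returned by `base64.b64encode`:

# Sketch: constant-time comparison. request.META values are str, while
# base64.b64encode returns bytes, so normalize before comparing.
import hmac

def signatures_match(received, computed):
    if isinstance(received, str):
        received = received.encode('utf-8')
    return hmac.compare_digest(received, computed)

Swapping `sig == computed_sig` for `signatures_match(sig, computed_sig)` leaves the rest of the decorator unchanged.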
d5528f5d5a5a24cdc77e87fcadaa3450fe6d3e3e
|
playbooks/appsemblerFiles/metalogixUserReport.py
|
playbooks/appsemblerFiles/metalogixUserReport.py
|
"""
First do the following:
ssh to the Metalogix server
```
(if necessary): sudo mv /tmp/metalogix_users.csv /tmp/metalogix_users.(date).csv
sudo su edxapp -s /bin/bash
cd ~/edx-platform
source ~/edxapp_env
./manage.py lms 6002exportusers.csv --settings=aws_appsembler
```
This will output the file transfer_users.txt.
Then
```
python
```
and paste in this code.
Exit the Metalogix server terminal session, `scp` the file to your local drive
and email it to Cathy or another contact at Metalogix.
"""
import json
import csv
fp = open('/tmp/metalogix_users.csv', 'w')
jsonfp = open('transfer_users.txt', 'r')
userjson = jsonfp.read()
users = json.loads(userjson)
writer = csv.writer(fp, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
for user in users:
user_id = user['up']['user_id']
username = user['u']['username']
fullname = user['up']['name']
emaildomain = user['u']['email'].split('@')[1]
isactive = user['u']['is_active']
lastlogin = user['u']['last_login']
output_data = [user_id, username, fullname, emaildomain, isactive, lastlogin]
encoded_row = [unicode(s).encode('utf-8') for s in output_data]
writer.writerow(encoded_row)
fp.close()
|
Add a custom user report generation script for Metalogix, with instructions.
|
Add a custom user report generation script for Metalogix, with
instructions.
|
Python
|
agpl-3.0
|
appsembler/configuration,appsembler/configuration,appsembler/configuration,appsembler/configuration
|
Add a custom user report generation script for Metalogix, with
instructions.
|
"""
First do the following:
ssh to the Metalogix server
```
(if necessary): sudo mv /tmp/metalogix_users.csv /tmp/metalogix_users.(date).csv
sudo su edxapp -s /bin/bash
cd ~/edx-platform
source ~/edxapp_env
./manage.py lms 6002exportusers.csv --settings=aws_appsembler
```
This will output the file transfer_users.txt.
Then
```
python
```
and paste in this code.
Exit the Metalogix server terminal session, `scp` the file to your local drive
and email it to Cathy or another contact at Metalogix.
"""
import json
import csv
fp = open('/tmp/metalogix_users.csv', 'w')
jsonfp = open('transfer_users.txt', 'r')
userjson = jsonfp.read()
users = json.loads(userjson)
writer = csv.writer(fp, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
for user in users:
user_id = user['up']['user_id']
username = user['u']['username']
fullname = user['up']['name']
emaildomain = user['u']['email'].split('@')[1]
isactive = user['u']['is_active']
lastlogin = user['u']['last_login']
output_data = [user_id, username, fullname, emaildomain, isactive, lastlogin]
encoded_row = [unicode(s).encode('utf-8') for s in output_data]
writer.writerow(encoded_row)
fp.close()
|
<commit_before><commit_msg>Add a custom user report generation script for Metalogix, with
instructions.<commit_after>
|
"""
First do the following:
ssh to the Metalogix server
```
(if necessary): sudo mv /tmp/metalogix_users.csv /tmp/metalogix_users.(date).csv
sudo su edxapp -s /bin/bash
cd ~/edx-platform
source ~/edxapp_env
./manage.py lms 6002exportusers.csv --settings=aws_appsembler
```
This will output the file transfer_users.txt.
Then
```
python
```
and paste in this code.
Exit the Metalogix server terminal session, `scp` the file to your local drive
and email it to Cathy or another contact at Metalogix.
"""
import json
import csv
fp = open('/tmp/metalogix_users.csv', 'w')
jsonfp = open('transfer_users.txt', 'r')
userjson = jsonfp.read()
users = json.loads(userjson)
writer = csv.writer(fp, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
for user in users:
user_id = user['up']['user_id']
username = user['u']['username']
fullname = user['up']['name']
emaildomain = user['u']['email'].split('@')[1]
isactive = user['u']['is_active']
lastlogin = user['u']['last_login']
output_data = [user_id, username, fullname, emaildomain, isactive, lastlogin]
encoded_row = [unicode(s).encode('utf-8') for s in output_data]
writer.writerow(encoded_row)
fp.close()
|
Add a custom user report generation script for Metalogix, with
instructions."""
First do the following:
ssh to the Metalogix server
```
(if necessary): sudo mv /tmp/metalogix_users.csv /tmp/metalogix_users.(date).csv
sudo su edxapp -s /bin/bash
cd ~/edx-platform
source ~/edxapp_env
./manage.py lms 6002exportusers.csv --settings=aws_appsembler
```
This will output the file transfer_users.txt.
Then
```
python
```
and paste in this code.
Exit the Metalogix server terminal session, `scp` the file to your local drive
and email it to Cathy or another contact at Metalogix.
"""
import json
import csv
fp = open('/tmp/metalogix_users.csv', 'w')
jsonfp = open('transfer_users.txt', 'r')
userjson = jsonfp.read()
users = json.loads(userjson)
writer = csv.writer(fp, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
for user in users:
user_id = user['up']['user_id']
username = user['u']['username']
fullname = user['up']['name']
emaildomain = user['u']['email'].split('@')[1]
isactive = user['u']['is_active']
lastlogin = user['u']['last_login']
output_data = [user_id, username, fullname, emaildomain, isactive, lastlogin]
encoded_row = [unicode(s).encode('utf-8') for s in output_data]
writer.writerow(encoded_row)
fp.close()
|
<commit_before><commit_msg>Add a custom user report generation script for Metalogix, with
instructions.<commit_after>"""
First do the following:
ssh to the Metalogix server
```
(if necessary): sudo mv /tmp/metalogix_users.csv /tmp/metalogix_users.(date).csv
sudo su edxapp -s /bin/bash
cd ~/edx-platform
source ~/edxapp_env
./manage.py lms 6002exportusers.csv --settings=aws_appsembler
```
This will output the file transfer_users.txt.
Then
```
python
```
and paste in this code.
Exit the Metalogix server terminal session, `scp` the file to your local drive
and email it to Cathy or another contact at Metalogix.
"""
import json
import csv
fp = open('/tmp/metalogix_users.csv', 'w')
jsonfp = open('transfer_users.txt', 'r')
userjson = jsonfp.read()
users = json.loads(userjson)
writer = csv.writer(fp, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
for user in users:
user_id = user['up']['user_id']
username = user['u']['username']
fullname = user['up']['name']
emaildomain = user['u']['email'].split('@')[1]
isactive = user['u']['is_active']
lastlogin = user['u']['last_login']
output_data = [user_id, username, fullname, emaildomain, isactive, lastlogin]
encoded_row = [unicode(s).encode('utf-8') for s in output_data]
writer.writerow(encoded_row)
fp.close()
|
|
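For reference, a Python 3 sketch of the same export, assuming the identical JSON layout with `u` and `up` sub-dicts; `csv` handles Unicode natively there, so the manual `encode` step disappears:

# Python 3 sketch of the same export; assumes the same JSON layout.
import csv
import json

with open('transfer_users.txt') as jsonfp, \
        open('/tmp/metalogix_users.csv', 'w', newline='') as fp:
    users = json.load(jsonfp)
    writer = csv.writer(fp, dialect='excel', quoting=csv.QUOTE_ALL)
    for user in users:
        writer.writerow([
            user['up']['user_id'],
            user['u']['username'],
            user['up']['name'],
            user['u']['email'].split('@')[1],  # domain only, no full address
            user['u']['is_active'],
            user['u']['last_login'],
        ])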
f0d24586e962078d87a7fad190435bdd17405c8f
|
labonneboite/alembic/versions/36188da1a919_allow_empty_alternance_email_in_save.py
|
labonneboite/alembic/versions/36188da1a919_allow_empty_alternance_email_in_save.py
|
"""
Allow empty alternance email in SAVE
Revision ID: 36188da1a919
Revises: 6a40b75d390a
Create Date: 2021-07-19 10:33:57.780826
"""
from alembic import op
from sqlalchemy.dialects import mysql
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '36188da1a919'
down_revision = '6a40b75d390a'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
def downgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
|
Allow empty alternance_email in save tables
|
Allow empty alternance_email in save tables
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
Allow empty alternance_email in save tables
|
"""
Allow empty alternance email in SAVE
Revision ID: 36188da1a919
Revises: 6a40b75d390a
Create Date: 2021-07-19 10:33:57.780826
"""
from alembic import op
from sqlalchemy.dialects import mysql
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '36188da1a919'
down_revision = '6a40b75d390a'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
def downgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
|
<commit_before><commit_msg>Allow empty alterance_email in save tables<commit_after>
|
"""
Allow empty alternance email in SAVE
Revision ID: 36188da1a919
Revises: 6a40b75d390a
Create Date: 2021-07-19 10:33:57.780826
"""
from alembic import op
from sqlalchemy.dialects import mysql
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '36188da1a919'
down_revision = '6a40b75d390a'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
def downgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
|
Allow empty alternance_email in save tables"""
Allow empty alternance email in SAVE
Revision ID: 36188da1a919
Revises: 6a40b75d390a
Create Date: 2021-07-19 10:33:57.780826
"""
from alembic import op
from sqlalchemy.dialects import mysql
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '36188da1a919'
down_revision = '6a40b75d390a'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
def downgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
|
<commit_before><commit_msg>Allow empty alternance_email in save tables<commit_after>"""
Allow empty alternance email in SAVE
Revision ID: 36188da1a919
Revises: 6a40b75d390a
Create Date: 2021-07-19 10:33:57.780826
"""
from alembic import op
from sqlalchemy.dialects import mysql
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '36188da1a919'
down_revision = '6a40b75d390a'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=True, existing_type=mysql.TEXT())
def downgrade():
op.alter_column('etablissements_admin_update', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_admin_add', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements_exportable', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
op.alter_column('etablissements', 'email_alternance', nullable=False, existing_type=mysql.TEXT())
|
|
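The four `alter_column` calls in each direction differ only by table name, so a loop keeps upgrade and downgrade in lockstep. A sketch, using the same Alembic imports as the revision above:

# Sketch: same migration with the table list factored out; behavior is
# identical, the repetition just goes away.
from alembic import op
from sqlalchemy.dialects import mysql

EMAIL_ALTERNANCE_TABLES = (
    'etablissements_admin_update',
    'etablissements_admin_add',
    'etablissements_exportable',
    'etablissements',
)

def upgrade():
    for table in EMAIL_ALTERNANCE_TABLES:
        op.alter_column(table, 'email_alternance',
                        nullable=True, existing_type=mysql.TEXT())

def downgrade():
    for table in EMAIL_ALTERNANCE_TABLES:
        op.alter_column(table, 'email_alternance',
                        nullable=False, existing_type=mysql.TEXT())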
edeaca9bbf1e6f65f11b1e7d1c892c5fd28113a4
|
bluebottle/clients/management/commands/migrate_start_project.py
|
bluebottle/clients/management/commands/migrate_start_project.py
|
from django.core.management.base import BaseCommand
from django.db import connection
from django.utils.timezone import now
from django.contrib.contenttypes.models import ContentType
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.pages.models import Page
from fluent_contents.models import Placeholder
from fluent_contents.plugins.rawhtml.models import RawHtmlItem
class Command(BaseCommand):
help = 'Create start project pages from settings'
def add_arguments(self, parser):
parser.add_argument('--tenant', '-t', action='store', dest='tenant',
help="The tenant to import the homepage for")
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help="Import all tenants")
def handle(self, *args, **options):
if options['all']:
tenants = Client.objects.all()
if options['tenant']:
tenants = [Client.objects.get(schema_name=options['tenant'])]
for client in tenants:
print "\n\nCreating start project page for {}".format(client.name)
connection.set_tenant(client)
with LocalTenant(client, clear_tenant=True):
Page.objects.filter(slug='start-project').delete()
try:
for language, content in properties.START_PROJECT.items():
page = Page(
title=content['title'],
slug='start-project',
full_page=True,
language=language,
publication_date=now(),
status='published',
)
page.save()
page_type = ContentType.objects.get_for_model(page)
(placeholder, _created) = Placeholder.objects.get_or_create(
parent_id=page.pk,
parent_type_id=page_type.pk,
title='Body',
slot='blog_contents',
)
block_type = ContentType.objects.get_for_model(RawHtmlItem)
RawHtmlItem.objects.create_for_placeholder(
placeholder,
polymorphic_ctype=block_type, # This does not get set automatically in migrations
html=content['content']
)
except AttributeError:
pass
|
Add import script for start project pages
|
Add import script for start project pages
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add import script for start project pages
|
from django.core.management.base import BaseCommand
from django.db import connection
from django.utils.timezone import now
from django.contrib.contenttypes.models import ContentType
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.pages.models import Page
from fluent_contents.models import Placeholder
from fluent_contents.plugins.rawhtml.models import RawHtmlItem
class Command(BaseCommand):
help = 'Create start project pages from settings'
def add_arguments(self, parser):
parser.add_argument('--tenant', '-t', action='store', dest='tenant',
help="The tenant to import the homepage for")
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help="Import all tenants")
def handle(self, *args, **options):
if options['all']:
tenants = Client.objects.all()
if options['tenant']:
tenants = [Client.objects.get(schema_name=options['tenant'])]
for client in tenants:
print "\n\nCreating start project page for {}".format(client.name)
connection.set_tenant(client)
with LocalTenant(client, clear_tenant=True):
Page.objects.filter(slug='start-project').delete()
try:
for language, content in properties.START_PROJECT.items():
page = Page(
title=content['title'],
slug='start-project',
full_page=True,
language=language,
publication_date=now(),
status='published',
)
page.save()
page_type = ContentType.objects.get_for_model(page)
(placeholder, _created) = Placeholder.objects.get_or_create(
parent_id=page.pk,
parent_type_id=page_type.pk,
title='Body',
slot='blog_contents',
)
block_type = ContentType.objects.get_for_model(RawHtmlItem)
RawHtmlItem.objects.create_for_placeholder(
placeholder,
polymorphic_ctype=block_type, # This does not get set automatically in migrations
html=content['content']
)
except AttributeError:
pass
|
<commit_before><commit_msg>Add import script for start project pages<commit_after>
|
from django.core.management.base import BaseCommand
from django.db import connection
from django.utils.timezone import now
from django.contrib.contenttypes.models import ContentType
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.pages.models import Page
from fluent_contents.models import Placeholder
from fluent_contents.plugins.rawhtml.models import RawHtmlItem
class Command(BaseCommand):
help = 'Create start project pages from settings'
def add_arguments(self, parser):
parser.add_argument('--tenant', '-t', action='store', dest='tenant',
help="The tenant to import the homepage for")
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help="Import all tenants")
def handle(self, *args, **options):
if options['all']:
tenants = Client.objects.all()
if options['tenant']:
tenants = [Client.objects.get(schema_name=options['tenant'])]
for client in tenants:
print "\n\nCreating start project page for {}".format(client.name)
connection.set_tenant(client)
with LocalTenant(client, clear_tenant=True):
Page.objects.filter(slug='start-project').delete()
try:
for language, content in properties.START_PROJECT.items():
page = Page(
title=content['title'],
slug='start-project',
full_page=True,
language=language,
publication_date=now(),
status='published',
)
page.save()
page_type = ContentType.objects.get_for_model(page)
(placeholder, _created) = Placeholder.objects.get_or_create(
parent_id=page.pk,
parent_type_id=page_type.pk,
title='Body',
slot='blog_contents',
)
block_type = ContentType.objects.get_for_model(RawHtmlItem)
RawHtmlItem.objects.create_for_placeholder(
placeholder,
polymorphic_ctype=block_type, # This does not get set automatically in migrations
html=content['content']
)
except AttributeError:
pass
|
Add import script for start project pagesfrom django.core.management.base import BaseCommand
from django.db import connection
from django.utils.timezone import now
from django.contrib.contenttypes.models import ContentType
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.pages.models import Page
from fluent_contents.models import Placeholder
from fluent_contents.plugins.rawhtml.models import RawHtmlItem
class Command(BaseCommand):
help = 'Create start project pages from settings'
def add_arguments(self, parser):
parser.add_argument('--tenant', '-t', action='store', dest='tenant',
help="The tenant to import the homepage for")
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help="Import all tenants")
def handle(self, *args, **options):
if options['all']:
tenants = Client.objects.all()
if options['tenant']:
tenants = [Client.objects.get(schema_name=options['tenant'])]
for client in tenants:
print "\n\nCreating start project page for {}".format(client.name)
connection.set_tenant(client)
with LocalTenant(client, clear_tenant=True):
Page.objects.filter(slug='start-project').delete()
try:
for language, content in properties.START_PROJECT.items():
page = Page(
title=content['title'],
slug='start-project',
full_page=True,
language=language,
publication_date=now(),
status='published',
)
page.save()
page_type = ContentType.objects.get_for_model(page)
(placeholder, _created) = Placeholder.objects.get_or_create(
parent_id=page.pk,
parent_type_id=page_type.pk,
title='Body',
slot='blog_contents',
)
block_type = ContentType.objects.get_for_model(RawHtmlItem)
RawHtmlItem.objects.create_for_placeholder(
placeholder,
polymorphic_ctype=block_type, # This does not get set automatically in migrations
html=content['content']
)
except AttributeError:
pass
|
<commit_before><commit_msg>Add import script for start project pages<commit_after>from django.core.management.base import BaseCommand
from django.db import connection
from django.utils.timezone import now
from django.contrib.contenttypes.models import ContentType
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.pages.models import Page
from fluent_contents.models import Placeholder
from fluent_contents.plugins.rawhtml.models import RawHtmlItem
class Command(BaseCommand):
help = 'Create start project pages from settings'
def add_arguments(self, parser):
parser.add_argument('--tenant', '-t', action='store', dest='tenant',
help="The tenant to import the homepage for")
parser.add_argument('--all', '-a', action='store_true', dest='all',
default=False, help="Import all tenants")
def handle(self, *args, **options):
if options['all']:
tenants = Client.objects.all()
if options['tenant']:
tenants = [Client.objects.get(schema_name=options['tenant'])]
for client in tenants:
print "\n\nCreating start project page for {}".format(client.name)
connection.set_tenant(client)
with LocalTenant(client, clear_tenant=True):
Page.objects.filter(slug='start-project').delete()
try:
for language, content in properties.START_PROJECT.items():
page = Page(
title=content['title'],
slug='start-project',
full_page=True,
language=language,
publication_date=now(),
status='published',
)
page.save()
page_type = ContentType.objects.get_for_model(page)
(placeholder, _created) = Placeholder.objects.get_or_create(
parent_id=page.pk,
parent_type_id=page_type.pk,
title='Body',
slot='blog_contents',
)
block_type = ContentType.objects.get_for_model(RawHtmlItem)
RawHtmlItem.objects.create_for_placeholder(
placeholder,
polymorphic_ctype=block_type, # This does not get set automatically in migrations
html=content['content']
)
except AttributeError:
pass
|
|
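As written, `handle()` leaves `tenants` unbound when the command is run with neither `--all` nor `--tenant`, which crashes with a `NameError` at the `for` loop. A sketch of a defensive prologue for the same method; only the branching changes:

# Sketch: guarded start of handle(). CommandError is Django's standard
# way to abort a management command with a message.
from django.core.management.base import CommandError

def handle(self, *args, **options):
    if options['all']:
        tenants = Client.objects.all()
    elif options['tenant']:
        tenants = [Client.objects.get(schema_name=options['tenant'])]
    else:
        raise CommandError('Pass --tenant <schema_name> or --all.')
    # ... the per-tenant loop continues exactly as in the command above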
2cf7faf06739eafc30ec6eacb6ded88c898053f1
|
fabfile.py
|
fabfile.py
|
from urllib import request, response
import json
def can_access(api_key):
req = request.Request('https://api.vultr.com/v1/auth/info')
req.add_header('API-Key', api_key)
try:
with request.urlopen(req) as f:
data = f.read()
if not data:
print('Failed to read data.')
return False
obj = json.loads(data.decode('utf-8'))
if not obj:
print('Failed to parse JSON.')
return False
print('Access succeeded.')
print('User: %s' % obj['name'])
print('Email: %s' % obj['email'])
return True
except BaseException as e:
print('Access failed.')
print(e)
return False
|
Check if we can access Vultr.
|
Check if we can access Vultr.
|
Python
|
apache-2.0
|
yingxuanxuan/fabric_script
|
Check if we can access Vultr.
|
from urllib import request, response
import json
def can_access(api_key):
req = request.Request('https://api.vultr.com/v1/auth/info')
req.add_header('API-Key', api_key)
try:
with request.urlopen(req) as f:
data = f.read()
if not data:
print('Failed to read data.')
return False
obj = json.loads(data.decode('utf-8'))
if not obj:
print('Failed to parse JSON.')
return False
print('Access succeeded.')
print('User: %s' % obj['name'])
print('Email: %s' % obj['email'])
return True
except BaseException as e:
print('Access failed.')
print(e)
return False
|
<commit_before><commit_msg>Check if we can access Vultr.<commit_after>
|
from urllib import request, response
import json
def can_access(api_key):
req = request.Request('https://api.vultr.com/v1/auth/info')
req.add_header('API-Key', api_key)
try:
with request.urlopen(req) as f:
data = f.read()
if not data:
print('Failed to read data.')
return False
obj = json.loads(data.decode('utf-8'))
if not obj:
print('Failed to parse JSON.')
return False
print('Access succeeded.')
print('User: %s' % obj['name'])
print('Email: %s' % obj['email'])
return True
except BaseException as e:
print('Access failed.')
print(e)
return False
|
Check if we can access Vultr.from urllib import request, response
import json
def can_access(api_key):
req = request.Request('https://api.vultr.com/v1/auth/info')
req.add_header('API-Key', api_key)
try:
with request.urlopen(req) as f:
data = f.read()
if not data:
print('Failed to read data.')
return False
obj = json.loads(data.decode('utf-8'))
if not obj:
print('Failed to parse JSON.')
return False
print('Access succeeded.')
print('User: %s' % obj['name'])
print('Email: %s' % obj['email'])
return True
except BaseException as e:
print('Access failed.')
print(e)
return False
|
<commit_before><commit_msg>Check if we can access Vultr.<commit_after>from urllib import request, response
import json
def can_access(api_key):
req = request.Request('https://api.vultr.com/v1/auth/info')
req.add_header('API-Key', api_key)
try:
with request.urlopen(req) as f:
data = f.read()
if not data:
print('Failed to read data.')
return False
obj = json.loads(data.decode('utf-8'))
if not obj:
print('Failed to parse JSON.')
return False
print('Access succeeded.')
print('User: %s' % obj['name'])
print('Email: %s' % obj['email'])
return True
except BaseException as e:
print('Access fail.')
print(e)
return False
|
|
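A usage sketch for the snippet above; the `VULTR_API_KEY` environment variable name is an assumption for illustration, not something the fabfile defines. Note also that the `response` import is unused and could be dropped:

# Usage sketch, appended to the fabfile above. The VULTR_API_KEY
# variable name is a placeholder, not taken from the snippet.
import os
import sys

if __name__ == '__main__':
    api_key = os.environ.get('VULTR_API_KEY', '')
    sys.exit(0 if can_access(api_key) else 1)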
2a7569a20c1840f81d0baf04d9849601ce51e77a
|
fabfile.py
|
fabfile.py
|
"""
Deploy Freesound Explorer to labs.freesound.org (requires Python's fabric installed)
"""
from __future__ import with_statement
from fabric.api import *
from fabric.contrib.files import put
env.hosts = ['ffont@fs-labs.s.upf.edu']
remote_dir = '/homedtic/ffont/apps/freesound-explorer'
def __copy_static():
with cd(remote_dir):
put("static/js/bundle.js", remote_dir + '/static/js/')
def __pull():
# Pull the git repo
with cd(remote_dir):
run("git pull")
def deploy():
__pull()
__copy_static()
# NOTE: if needed, backend restart must be done manually
|
Add deploy script using Fabric
|
Add deploy script using Fabric
|
Python
|
mit
|
ffont/freesound-explorer,ffont/freesound-explorer,ffont/freesound-explorer
|
Add deploy script using Fabric
|
"""
Deploy Freesound Explorer to labs.freesound.org (requires Python's fabric installed)
"""
from __future__ import with_statement
from fabric.api import *
from fabric.contrib.files import put
env.hosts = ['ffont@fs-labs.s.upf.edu']
remote_dir = '/homedtic/ffont/apps/freesound-explorer'
def __copy_static():
with cd(remote_dir):
put("static/js/bundle.js", remote_dir + '/static/js/')
def __pull():
# Pull the git repo
with cd(remote_dir):
run("git pull")
def deploy():
__pull()
__copy_static()
# NOTE: if needed, backend restart must be done manually
|
<commit_before><commit_msg>Add deploy script using Fabric<commit_after>
|
"""
Deploy Freesound Explorer to labs.freesound.org (requires Python's fabric installed)
"""
from __future__ import with_statement
from fabric.api import *
from fabric.contrib.files import put
env.hosts = ['ffont@fs-labs.s.upf.edu']
remote_dir = '/homedtic/ffont/apps/freesound-explorer'
def __copy_static():
with cd(remote_dir):
put("static/js/bundle.js", remote_dir + '/static/js/')
def __pull():
# Pull the git repo
with cd(remote_dir):
run("git pull")
def deploy():
__pull()
__copy_static()
# NOTE: if needed, backend restart must be done manually
|
Add deploy script using Fabric"""
Deploy Freesound Explorer to labs.freesound.org (requires Python's fabric installed)
"""
from __future__ import with_statement
from fabric.api import *
from fabric.contrib.files import put
env.hosts = ['ffont@fs-labs.s.upf.edu']
remote_dir = '/homedtic/ffont/apps/freesound-explorer'
def __copy_static():
with cd(remote_dir):
put("static/js/bundle.js", remote_dir + '/static/js/')
def __pull():
# Pull the git repo
with cd(remote_dir):
run("git pull")
def deploy():
__pull()
__copy_static()
# NOTE: if needed, backend restart must be done manually
|
<commit_before><commit_msg>Add deploy script using Fabric<commit_after>"""
Deploy Freesound Explorer to labs.freesound.org (requires Python's fabric installed)
"""
from __future__ import with_statement
from fabric.api import *
from fabric.contrib.files import put
env.hosts = ['ffont@fs-labs.s.upf.edu']
remote_dir = '/homedtic/ffont/apps/freesound-explorer'
def __copy_static():
with cd(remote_dir):
put("static/js/bundle.js", remote_dir + '/static/js/')
def __pull():
# Pull the git repo
with cd(remote_dir):
run("git pull")
def deploy():
__pull()
__copy_static()
# NOTE: if needed, backend restart must be done manually
|
|
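The script is invoked as `fab deploy`. Since the backend restart stays manual, one might add a task like the sketch below, relying on the same `from fabric.api import *` as the fabfile above; the `supervisorctl` program name is a placeholder, not taken from the repository:

# Optional restart task; 'freesound-explorer' is a hypothetical
# supervisor program name.
def restart_backend():
    sudo("supervisorctl restart freesound-explorer")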
6091954f98410c006457875b45f3faf094f2bc78
|
tests/custom_test_case.py
|
tests/custom_test_case.py
|
class CustomTestCase(object):
def assertSentence(self, result, expected):
try:
self.assertEqual(result, expected)
except AssertionError as e:
e.args = (e.args[0].replace("\\n", "\n"),) # edit the exception's message
raise
|
Add CustomTestCase class to format output of assertEqual
|
Add CustomTestCase class to format output of assertEqual
|
Python
|
mit
|
kyamaguchi/SublimeObjC2RubyMotion,kyamaguchi/SublimeObjC2RubyMotion
|
Add CustomTestCase class to format output of assertEqual
|
class CustomTestCase(object):
def assertSentence(self, result, expected):
try:
self.assertEqual(result, expected)
except AssertionError as e:
e.args = (e.args[0].replace("\\n", "\n"),) # edit the exception's message
raise
|
<commit_before><commit_msg>Add CustomTestCase class to format output of assertEqual<commit_after>
|
class CustomTestCase(object):
def assertSentence(self, result, expected):
try:
self.assertEqual(result, expected)
except AssertionError as e:
e.args = (e.args[0].replace("\\n", "\n"),) # edit the exception's message
raise
|
Add CustomTestCase class to format output of assertEqualclass CustomTestCase(object):
def assertSentence(self, result, expected):
try:
self.assertEqual(result, expected)
except AssertionError as e:
e.args = (e.args[0].replace("\\n", "\n"),) # edit the exception's message
raise
|
<commit_before><commit_msg>Add CustomTestCase class to format output of assertEqual<commit_after>class CustomTestCase(object):
def assertSentence(self, result, expected):
try:
self.assertEqual(result, expected)
except AssertionError as e:
e.args = (e.args[0].replace("\\n", "\n"),) # edit the exception's message
raise
|
|
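`CustomTestCase` is a mixin: it calls `self.assertEqual`, which must come from `unittest.TestCase` elsewhere in the MRO. A usage sketch:

# Usage sketch: combine the mixin with unittest.TestCase, which
# supplies the assertEqual that assertSentence calls.
import unittest

class ConversionTest(CustomTestCase, unittest.TestCase):
    def test_multiline(self):
        self.assertSentence('a\nb', 'a\nb')  # failures print real newlines

if __name__ == '__main__':
    unittest.main()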
56da76bd91d937576f94576c0e9ac5136c6da4a8
|
emission/tests/netTests/TestPipeline.py
|
emission/tests/netTests/TestPipeline.py
|
import unittest
import logging
import arrow
import os
import emission.core.get_database as edb
import emission.core.wrapper.localdate as ecwl
import emission.tests.common as etc
from emission.net.api import pipeline
class TestPipeline(unittest.TestCase):
def setUp(self):
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-21")
self.testUUID1 = self.testUUID
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-27")
def tearDown(self):
self.clearRelatedDb()
def clearRelatedDb(self):
edb.get_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID})
edb.get_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID1})
def testNoAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
def testAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
etc.runIntakePipeline(self.testUUID)
self.assertAlmostEqual(pipeline.get_range(self.testUUID), (1440688739.672, 1440729142.709))
if __name__ == '__main__':
import emission.tests.common as etc
etc.configLogging()
unittest.main()
|
Add a unit test for the pipeline range query
|
Add a unit test for the pipeline range query
So we can check for 1c113c005b260a13bd93c33bf9b033184c2f0ea5
going forward
|
Python
|
bsd-3-clause
|
shankari/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server
|
Add a unit test for the pipeline range query
So we can check for 1c113c005b260a13bd93c33bf9b033184c2f0ea5
going forward
|
import unittest
import logging
import arrow
import os
import emission.core.get_database as edb
import emission.core.wrapper.localdate as ecwl
import emission.tests.common as etc
from emission.net.api import pipeline
class TestPipeline(unittest.TestCase):
def setUp(self):
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-21")
self.testUUID1 = self.testUUID
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-27")
def tearDown(self):
self.clearRelatedDb()
def clearRelatedDb(self):
edb.get_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID})
edb.get_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID1})
def testNoAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
def testAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
etc.runIntakePipeline(self.testUUID)
self.assertAlmostEqual(pipeline.get_range(self.testUUID), (1440688739.672, 1440729142.709))
if __name__ == '__main__':
import emission.tests.common as etc
etc.configLogging()
unittest.main()
|
<commit_before><commit_msg>Add a unit test for the pipeline range query
So we can check for 1c113c005b260a13bd93c33bf9b033184c2f0ea5
going forward<commit_after>
|
import unittest
import logging
import arrow
import os
import emission.core.get_database as edb
import emission.core.wrapper.localdate as ecwl
import emission.tests.common as etc
from emission.net.api import pipeline
class TestPipeline(unittest.TestCase):
def setUp(self):
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-21")
self.testUUID1 = self.testUUID
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-27")
def tearDown(self):
self.clearRelatedDb()
def clearRelatedDb(self):
edb.get_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID})
edb.get_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID1})
def testNoAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
def testAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
etc.runIntakePipeline(self.testUUID)
self.assertAlmostEqual(pipeline.get_range(self.testUUID), (1440688739.672, 1440729142.709))
if __name__ == '__main__':
import emission.tests.common as etc
etc.configLogging()
unittest.main()
|
Add a unit test for the pipeline range query
So we can check for 1c113c005b260a13bd93c33bf9b033184c2f0ea5
going forwardimport unittest
import logging
import arrow
import os
import emission.core.get_database as edb
import emission.core.wrapper.localdate as ecwl
import emission.tests.common as etc
from emission.net.api import pipeline
class TestPipeline(unittest.TestCase):
def setUp(self):
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-21")
self.testUUID1 = self.testUUID
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-27")
def tearDown(self):
self.clearRelatedDb()
def clearRelatedDb(self):
edb.get_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID})
edb.get_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID1})
def testNoAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
def testAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
etc.runIntakePipeline(self.testUUID)
self.assertAlmostEqual(pipeline.get_range(self.testUUID), (1440688739.672, 1440729142.709))
if __name__ == '__main__':
import emission.tests.common as etc
etc.configLogging()
unittest.main()
|
<commit_before><commit_msg>Add a unit test for the pipeline range query
So we can check for 1c113c005b260a13bd93c33bf9b033184c2f0ea5
going forward<commit_after>import unittest
import logging
import arrow
import os
import emission.core.get_database as edb
import emission.core.wrapper.localdate as ecwl
import emission.tests.common as etc
from emission.net.api import pipeline
class TestPipeline(unittest.TestCase):
def setUp(self):
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-21")
self.testUUID1 = self.testUUID
etc.setupRealExample(self,
"emission/tests/data/real_examples/shankari_2015-aug-27")
def tearDown(self):
self.clearRelatedDb()
def clearRelatedDb(self):
edb.get_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID})
edb.get_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_analysis_timeseries_db().delete_many({"user_id": self.testUUID1})
edb.get_pipeline_state_db().delete_many({"user_id": self.testUUID1})
def testNoAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
def testAnalysisResults(self):
self.assertEqual(pipeline.get_range(self.testUUID), (None, None))
etc.runIntakePipeline(self.testUUID)
self.assertAlmostEqual(pipeline.get_range(self.testUUID), (1440688739.672, 1440729142.709))
if __name__ == '__main__':
import emission.tests.common as etc
etc.configLogging()
unittest.main()
|
|
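A caveat on `testAnalysisResults`: `assertAlmostEqual` on tuples passes only through its exact-equality short-circuit; when the tuples differ it raises `TypeError` (tuples don't support subtraction) rather than a readable failure. A helper one might add to the test class to compare ranges elementwise:

# Sketch: elementwise range comparison, intended as a method on
# TestPipeline, so mismatches fail with a clear message.
def assertRangeAlmostEqual(self, result, expected, places=3):
    self.assertEqual(len(result), len(expected))
    for got, want in zip(result, expected):
        if got is None or want is None:
            self.assertEqual(got, want)  # handles the (None, None) case
        else:
            self.assertAlmostEqual(got, want, places=places)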
6130ef75adf9755ab63f82a9dc98d9be29e2b9bc
|
models/classifier.py
|
models/classifier.py
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia classifiers."""
__author__ = 'Sean Lip'
import os
from base_model import BaseModel
import feconf
import utils
from google.appengine.ext import ndb
class RuleSpec(ndb.Model):
"""A rule specification in a classifier."""
# Python code for the rule, e.g. "equals(x)"
rule = ndb.StringProperty()
# Human-readable text used to display the rule in the UI, e.g. "Answer is
# equal to {{x|MusicNote}}".
name = ndb.TextProperty()
# Python code for pre-commit checks on the rule parameters.
checks = ndb.TextProperty(repeated=True)
class Classifier(BaseModel):
"""An Oppia classifier."""
# The id is the same as the directory name for this classifier.
@property
def id(self):
return self.key.id()
# Rule specifications for the classifier.
rules = ndb.LocalStructuredProperty(RuleSpec, repeated=True)
@classmethod
def delete_all_classifiers(cls):
"""Deletes all classifiers."""
classifier_list = Classifier.query()
for classifier in classifier_list:
classifier.key.delete()
@classmethod
def load_default_classifiers(cls):
"""Loads the default classifiers."""
# raise Exception(os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR))
classifier_ids = [d for d in os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR)
if os.path.isdir(
os.path.join(feconf.SAMPLE_CLASSIFIERS_DIR, d))]
for classifier_id in classifier_ids:
rules_filepath = os.path.join(
feconf.SAMPLE_CLASSIFIERS_DIR, classifier_id,
'%sRules.yaml' % classifier_id)
with open(rules_filepath) as f:
rule_dict = utils.dict_from_yaml(f.read().decode('utf-8'))
rules = []
for rule in rule_dict:
r_spec = RuleSpec(rule=rule, name=rule_dict[rule]['name'])
if 'checks' in rule_dict[rule]:
r_spec.checks = rule_dict[rule]['checks']
rules.append(r_spec)
Classifier(id=classifier_id, rules=rules).put()
|
Add file missed in previous commit.
|
Add file missed in previous commit.
|
Python
|
apache-2.0
|
asandyz/oppia,danieljjh/oppia,wangsai/oppia,brianrodri/oppia,BenHenning/oppia,prasanna08/oppia,google-code-export/oppia,felipecocco/oppia,AllanYangZhou/oppia,rackstar17/oppia,toooooper/oppia,oppia/oppia,MaximLich/oppia,VictoriaRoux/oppia,Cgruppo/oppia,kingctan/oppia,bjvoth/oppia,gale320/oppia,Atlas-Sailed-Co/oppia,google-code-export/oppia,sarahfo/oppia,himanshu-dixit/oppia,MaximLich/oppia,mit0110/oppia,sbhowmik89/oppia,brylie/oppia,souravbadami/oppia,openhatch/oh-missions-oppia-beta,terrameijar/oppia,virajprabhu/oppia,zgchizi/oppia-uc,Dev4X/oppia,sarahfo/oppia,anthkris/oppia,CMDann/oppia,toooooper/oppia,aldeka/oppia,sdulal/oppia,sdulal/oppia,sanyaade-teachings/oppia,shaz13/oppia,bjvoth/oppia,MAKOSCAFEE/oppia,bjvoth/oppia,jestapinski/oppia,fernandopinhati/oppia,cleophasmashiri/oppia,leandrotoledo/oppia,MaximLich/oppia,michaelWagner/oppia,himanshu-dixit/oppia,anthkris/oppia,amitdeutsch/oppia,amitdeutsch/oppia,infinyte/oppia,danieljjh/oppia,shaz13/oppia,Dev4X/oppia,mindpin/mindpin_oppia,anggorodewanto/oppia,sunu/oh-missions-oppia-beta,sarahfo/oppia,sbhowmik89/oppia,BenHenning/oppia,toooooper/oppia,edallison/oppia,terrameijar/oppia,google-code-export/oppia,hazmatzo/oppia,brianrodri/oppia,mindpin/mindpin_oppia,directorlive/oppia,openhatch/oh-missions-oppia-beta,amitdeutsch/oppia,zgchizi/oppia-uc,kaffeel/oppia,terrameijar/oppia,nagyistoce/oppia,jestapinski/oppia,paulproteus/oppia-test-3,leandrotoledo/oppia,bjvoth/oppia,oppia/oppia,Atlas-Sailed-Co/oppia,oppia/oppia,sunu/oppia,michaelWagner/oppia,brianrodri/oppia,rackstar17/oppia,kevinlee12/oppia,souravbadami/oppia,amgowano/oppia,kingctan/oppia,anthkris/oppia,won0089/oppia,brianrodri/oppia,nagyistoce/oppia,rackstar17/oppia,sbhowmik89/oppia,shaz13/oppia,zgchizi/oppia-uc,asandyz/oppia,sunu/oppia,gale320/oppia,nagyistoce/oppia,zgchizi/oppia-uc,felipecocco/oppia,felipecocco/oppia,fernandopinhati/oppia,himanshu-dixit/oppia,fernandopinhati/oppia,VictoriaRoux/oppia,DewarM/oppia,whygee/oppia,MAKOSCAFEE/oppia,BenHenning/oppia,aldeka/oppia,whygee/oppia,CMDann/oppia,Cgruppo/oppia,kennho/oppia,kevinlee12/oppia,prasanna08/oppia,kennho/oppia,AllanYangZhou/oppia,dippatel1994/oppia,leandrotoledo/oppia,hazmatzo/oppia,edallison/oppia,jestapinski/oppia,kevinlee12/oppia,raju249/oppia,sanyaade-teachings/oppia,jestapinski/oppia,brylie/oppia,kingctan/oppia,aldeka/oppia,mindpin/mindpin_oppia,prasanna08/oppia,mit0110/oppia,miyucy/oppia,dippatel1994/oppia,mit0110/oppia,nagyistoce/oppia,sbhowmik89/oppia,amitdeutsch/oppia,hazmatzo/oppia,edallison/oppia,sdulal/oppia,raju249/oppia,CMDann/oppia,infinyte/oppia,Cgruppo/oppia,danieljjh/oppia,sarahfo/oppia,gale320/oppia,gale320/oppia,brylie/oppia,oulan/oppia,kennho/oppia,openhatch/oh-missions-oppia-beta,sunu/oppia-test-2,prasanna08/oppia,anggorodewanto/oppia,won0089/oppia,Atlas-Sailed-Co/oppia,openhatch/oh-missions-oppia-beta,CMDann/oppia,sbhowmik89/oppia,michaelWagner/oppia,miyucy/oppia,mit0110/oppia,whygee/oppia,virajprabhu/oppia,kevinlee12/oppia,Atlas-Sailed-Co/oppia,MAKOSCAFEE/oppia,directorlive/oppia,toooooper/oppia,danieljjh/oppia,directorlive/oppia,toooooper/oppia,VictoriaRoux/oppia,Dev4X/oppia,oulan/oppia,souravbadami/oppia,CMDann/oppia,michaelWagner/oppia,kaffeel/oppia,amgowano/oppia,kaffeel/oppia,kennho/oppia,won0089/oppia,virajprabhu/oppia,DewarM/oppia,fernandopinhati/oppia,Atlas-Sailed-Co/oppia,miyucy/oppia,raju249/oppia,wangsai/oppia,MAKOSCAFEE/oppia,won0089/oppia,danieljjh/oppia,paulproteus/oppia-test-3,amgowano/oppia,virajprabhu/oppia,nagyistoce/oppia,sunu/oppia-test-4,oulan/oppia,wangsai/oppia,sunu/oh-missions-oppia-
beta,kevinlee12/oppia,kaffeel/oppia,fernandopinhati/oppia,sanyaade-teachings/oppia,michaelWagner/oppia,prasanna08/oppia,kaffeel/oppia,DewarM/oppia,hazmatzo/oppia,anggorodewanto/oppia,sunu/oppia,Dev4X/oppia,oppia/oppia,virajprabhu/oppia,shaz13/oppia,Cgruppo/oppia,AllanYangZhou/oppia,edallison/oppia,souravbadami/oppia,directorlive/oppia,sarahfo/oppia,BenHenning/oppia,asandyz/oppia,oulan/oppia,raju249/oppia,cleophasmashiri/oppia,felipecocco/oppia,VictoriaRoux/oppia,brianrodri/oppia,sdulal/oppia,hazmatzo/oppia,brylie/oppia,asandyz/oppia,dippatel1994/oppia,infinyte/oppia,Dev4X/oppia,cleophasmashiri/oppia,directorlive/oppia,wangsai/oppia,anthkris/oppia,VictoriaRoux/oppia,paulproteus/oppia-test-3,leandrotoledo/oppia,aldeka/oppia,whygee/oppia,google-code-export/oppia,brylie/oppia,rackstar17/oppia,terrameijar/oppia,google-code-export/oppia,sunu/oppia,MaximLich/oppia,asandyz/oppia,cleophasmashiri/oppia,souravbadami/oppia,mit0110/oppia,oulan/oppia,whygee/oppia,oppia/oppia,won0089/oppia,infinyte/oppia,himanshu-dixit/oppia,Cgruppo/oppia,cleophasmashiri/oppia,kingctan/oppia,sunu/oppia-test-2,dippatel1994/oppia,kennho/oppia,infinyte/oppia,bjvoth/oppia,gale320/oppia,dippatel1994/oppia,leandrotoledo/oppia,amgowano/oppia,sanyaade-teachings/oppia,AllanYangZhou/oppia,miyucy/oppia,sunu/oppia-test-2,amitdeutsch/oppia,sdulal/oppia,BenHenning/oppia,felipecocco/oppia,mindpin/mindpin_oppia,sunu/oppia-test-4,wangsai/oppia,DewarM/oppia,sunu/oh-missions-oppia-beta,sunu/oh-missions-oppia-beta,anggorodewanto/oppia,sunu/oppia,kingctan/oppia,DewarM/oppia,sanyaade-teachings/oppia,sunu/oppia-test-4
|
Add file missed in previous commit.
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia classifiers."""
__author__ = 'Sean Lip'
import os
from base_model import BaseModel
import feconf
import utils
from google.appengine.ext import ndb
class RuleSpec(ndb.Model):
"""A rule specification in a classifier."""
# Python code for the rule, e.g. "equals(x)"
rule = ndb.StringProperty()
# Human-readable text used to display the rule in the UI, e.g. "Answer is
# equal to {{x|MusicNote}}".
name = ndb.TextProperty()
# Python code for pre-commit checks on the rule parameters.
checks = ndb.TextProperty(repeated=True)
class Classifier(BaseModel):
"""An Oppia classifier."""
# The id is the same as the directory name for this classifier.
@property
def id(self):
return self.key.id()
# Rule specifications for the classifier.
rules = ndb.LocalStructuredProperty(RuleSpec, repeated=True)
@classmethod
def delete_all_classifiers(cls):
"""Deletes all classifiers."""
classifier_list = Classifier.query()
for classifier in classifier_list:
classifier.key.delete()
@classmethod
def load_default_classifiers(cls):
"""Loads the default classifiers."""
# raise Exception(os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR))
classifier_ids = [d for d in os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR)
if os.path.isdir(
os.path.join(feconf.SAMPLE_CLASSIFIERS_DIR, d))]
for classifier_id in classifier_ids:
rules_filepath = os.path.join(
feconf.SAMPLE_CLASSIFIERS_DIR, classifier_id,
'%sRules.yaml' % classifier_id)
with open(rules_filepath) as f:
rule_dict = utils.dict_from_yaml(f.read().decode('utf-8'))
rules = []
for rule in rule_dict:
r_spec = RuleSpec(rule=rule, name=rule_dict[rule]['name'])
if 'checks' in rule_dict[rule]:
r_spec.checks = rule_dict[rule]['checks']
rules.append(r_spec)
Classifier(id=classifier_id, rules=rules).put()
|
<commit_before><commit_msg>Add file missed in previous commit.<commit_after>
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia classifiers."""
__author__ = 'Sean Lip'
import os
from base_model import BaseModel
import feconf
import utils
from google.appengine.ext import ndb
class RuleSpec(ndb.Model):
"""A rule specification in a classifier."""
# Python code for the rule, e.g. "equals(x)"
rule = ndb.StringProperty()
# Human-readable text used to display the rule in the UI, e.g. "Answer is
# equal to {{x|MusicNote}}".
name = ndb.TextProperty()
# Python code for pre-commit checks on the rule parameters.
checks = ndb.TextProperty(repeated=True)
class Classifier(BaseModel):
"""An Oppia classifier."""
# The id is the same as the directory name for this classifier.
@property
def id(self):
return self.key.id()
# Rule specifications for the classifier.
rules = ndb.LocalStructuredProperty(RuleSpec, repeated=True)
@classmethod
def delete_all_classifiers(cls):
"""Deletes all classifiers."""
classifier_list = Classifier.query()
for classifier in classifier_list:
classifier.key.delete()
@classmethod
def load_default_classifiers(cls):
"""Loads the default classifiers."""
# raise Exception(os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR))
classifier_ids = [d for d in os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR)
if os.path.isdir(
os.path.join(feconf.SAMPLE_CLASSIFIERS_DIR, d))]
for classifier_id in classifier_ids:
rules_filepath = os.path.join(
feconf.SAMPLE_CLASSIFIERS_DIR, classifier_id,
'%sRules.yaml' % classifier_id)
with open(rules_filepath) as f:
rule_dict = utils.dict_from_yaml(f.read().decode('utf-8'))
rules = []
for rule in rule_dict:
r_spec = RuleSpec(rule=rule, name=rule_dict[rule]['name'])
if 'checks' in rule_dict[rule]:
r_spec.checks = rule_dict[rule]['checks']
rules.append(r_spec)
Classifier(id=classifier_id, rules=rules).put()
|
Add file missed in previous commit.# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia classifiers."""
__author__ = 'Sean Lip'
import os
from base_model import BaseModel
import feconf
import utils
from google.appengine.ext import ndb
class RuleSpec(ndb.Model):
"""A rule specification in a classifier."""
# Python code for the rule, e.g. "equals(x)"
rule = ndb.StringProperty()
# Human-readable text used to display the rule in the UI, e.g. "Answer is
# equal to {{x|MusicNote}}".
name = ndb.TextProperty()
# Python code for pre-commit checks on the rule parameters.
checks = ndb.TextProperty(repeated=True)
class Classifier(BaseModel):
"""An Oppia classifier."""
# The id is the same as the directory name for this classifier.
@property
def id(self):
return self.key.id()
# Rule specifications for the classifier.
rules = ndb.LocalStructuredProperty(RuleSpec, repeated=True)
@classmethod
def delete_all_classifiers(cls):
"""Deletes all classifiers."""
classifier_list = Classifier.query()
for classifier in classifier_list:
classifier.key.delete()
@classmethod
def load_default_classifiers(cls):
"""Loads the default classifiers."""
# raise Exception(os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR))
classifier_ids = [d for d in os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR)
if os.path.isdir(
os.path.join(feconf.SAMPLE_CLASSIFIERS_DIR, d))]
for classifier_id in classifier_ids:
rules_filepath = os.path.join(
feconf.SAMPLE_CLASSIFIERS_DIR, classifier_id,
'%sRules.yaml' % classifier_id)
with open(rules_filepath) as f:
rule_dict = utils.dict_from_yaml(f.read().decode('utf-8'))
rules = []
for rule in rule_dict:
r_spec = RuleSpec(rule=rule, name=rule_dict[rule]['name'])
if 'checks' in rule_dict[rule]:
r_spec.checks = rule_dict[rule]['checks']
rules.append(r_spec)
Classifier(id=classifier_id, rules=rules).put()
|
<commit_before><commit_msg>Add file missed in previous commit.<commit_after># coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia classifiers."""
__author__ = 'Sean Lip'
import os
from base_model import BaseModel
import feconf
import utils
from google.appengine.ext import ndb
class RuleSpec(ndb.Model):
"""A rule specification in a classifier."""
# Python code for the rule, e.g. "equals(x)"
rule = ndb.StringProperty()
# Human-readable text used to display the rule in the UI, e.g. "Answer is
# equal to {{x|MusicNote}}".
name = ndb.TextProperty()
# Python code for pre-commit checks on the rule parameters.
checks = ndb.TextProperty(repeated=True)
class Classifier(BaseModel):
"""An Oppia classifier."""
# The id is the same as the directory name for this classifier.
@property
def id(self):
return self.key.id()
# Rule specifications for the classifier.
rules = ndb.LocalStructuredProperty(RuleSpec, repeated=True)
@classmethod
def delete_all_classifiers(cls):
"""Deletes all classifiers."""
classifier_list = Classifier.query()
for classifier in classifier_list:
classifier.key.delete()
@classmethod
def load_default_classifiers(cls):
"""Loads the default classifiers."""
# raise Exception(os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR))
classifier_ids = [d for d in os.listdir(feconf.SAMPLE_CLASSIFIERS_DIR)
if os.path.isdir(
os.path.join(feconf.SAMPLE_CLASSIFIERS_DIR, d))]
for classifier_id in classifier_ids:
rules_filepath = os.path.join(
feconf.SAMPLE_CLASSIFIERS_DIR, classifier_id,
'%sRules.yaml' % classifier_id)
with open(rules_filepath) as f:
rule_dict = utils.dict_from_yaml(f.read().decode('utf-8'))
rules = []
for rule in rule_dict:
r_spec = RuleSpec(rule=rule, name=rule_dict[rule]['name'])
if 'checks' in rule_dict[rule]:
r_spec.checks = rule_dict[rule]['checks']
rules.append(r_spec)
Classifier(id=classifier_id, rules=rules).put()
|
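For reference, a sketch (not from the commit) of the structure load_default_classifiers expects back from utils.dict_from_yaml for a hypothetical MusicNoteRules.yaml; keys become RuleSpec.rule, while 'name' and the optional 'checks' fill the remaining RuleSpec fields. The concrete rule below is invented for illustration.
# Parsed equivalent of a hypothetical MusicNoteRules.yaml (invented example).
rule_dict = {
    'equals(x)': {
        'name': 'Answer is equal to {{x|MusicNote}}',
        'checks': ['x is not None'],  # optional pre-commit check strings
    },
}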
|
18a4732ef1f2de5cbfcd61905a609a4ae4eb73e9
|
numba/cuda/tests/cudadrv/test_managed_alloc.py
|
numba/cuda/tests/cudadrv/test_managed_alloc.py
|
import numpy as np
from numba.cuda.cudadrv import driver
from numba import cuda
from numba.cuda.testing import unittest, ContextResettingTestCase
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Driver API unsupported in the simulator')
class TestManagedAlloc(ContextResettingTestCase):
def test_managed_alloc_driver(self):
# Verify that we can allocate and operate on managed
# memory through the CUDA driver interface.
n = 32
mem = cuda.current_context().memallocmanaged(n)
dtype = np.dtype(np.uint8)
ary = np.ndarray(shape=n // dtype.itemsize, dtype=dtype,
buffer=mem)
magic = 0xab
driver.device_memset(mem, magic, n)
self.assertTrue(np.all(ary == magic))
def test_managed_alloc_oversubscription(self):
# Verify we can correctly operate on a managed array
# larger than the GPU memory, on both CPU and GPU.
ctx = cuda.current_context()
total_mem_size = ctx.get_memory_info().total
dtype = np.dtype(np.float32)
n_bytes = 2 * total_mem_size
n_elems = int(n_bytes / dtype.itemsize)
ary = cuda.managed_array(n_elems, dtype=dtype)
ary.fill(123)
self.assertTrue(all(ary == 123))
driver.device_memset(ary, 0, n_bytes)
self.assertTrue(all(ary == 0))
if __name__ == '__main__':
unittest.main()
|
Add test for managed alloc
|
Add test for managed alloc
|
Python
|
bsd-2-clause
|
seibert/numba,numba/numba,gmarkall/numba,IntelLabs/numba,gmarkall/numba,numba/numba,stonebig/numba,IntelLabs/numba,numba/numba,stonebig/numba,numba/numba,stonebig/numba,IntelLabs/numba,cpcloud/numba,stuartarchibald/numba,gmarkall/numba,stuartarchibald/numba,stuartarchibald/numba,seibert/numba,IntelLabs/numba,IntelLabs/numba,numba/numba,cpcloud/numba,stonebig/numba,seibert/numba,stuartarchibald/numba,stonebig/numba,cpcloud/numba,cpcloud/numba,cpcloud/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,seibert/numba,gmarkall/numba
|
Add test for managed alloc
|
import numpy as np
from numba.cuda.cudadrv import driver
from numba import cuda
from numba.cuda.testing import unittest, ContextResettingTestCase
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Driver API unsupported in the simulator')
class TestManagedAlloc(ContextResettingTestCase):
def test_managed_alloc_driver(self):
# Verify that we can allocate and operate on managed
# memory through the CUDA driver interface.
n = 32
mem = cuda.current_context().memallocmanaged(n)
dtype = np.dtype(np.uint8)
ary = np.ndarray(shape=n // dtype.itemsize, dtype=dtype,
buffer=mem)
magic = 0xab
driver.device_memset(mem, magic, n)
self.assertTrue(np.all(ary == magic))
def test_managed_alloc_oversubscription(self):
# Verify we can correctly operate on a managed array
# larger than the GPU memory, on both CPU and GPU.
ctx = cuda.current_context()
total_mem_size = ctx.get_memory_info().total
dtype = np.dtype(np.float32)
n_bytes = 2 * total_mem_size
n_elems = int(n_bytes / dtype.itemsize)
ary = cuda.managed_array(n_elems, dtype=dtype)
ary.fill(123)
self.assertTrue(all(ary == 123))
driver.device_memset(ary, 0, n_bytes)
self.assertTrue(all(ary == 0))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test for managed alloc<commit_after>
|
import numpy as np
from numba.cuda.cudadrv import driver
from numba import cuda
from numba.cuda.testing import unittest, ContextResettingTestCase
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Driver API unsupported in the simulator')
class TestManagedAlloc(ContextResettingTestCase):
def test_managed_alloc_driver(self):
# Verify that we can allocate and operate on managed
# memory through the CUDA driver interface.
n = 32
mem = cuda.current_context().memallocmanaged(n)
dtype = np.dtype(np.uint8)
ary = np.ndarray(shape=n // dtype.itemsize, dtype=dtype,
buffer=mem)
magic = 0xab
driver.device_memset(mem, magic, n)
self.assertTrue(np.all(ary == magic))
def test_managed_alloc_oversubscription(self):
# Verify we can correctly operate on a managed array
# larger than the GPU memory, on both CPU and GPU.
ctx = cuda.current_context()
total_mem_size = ctx.get_memory_info().total
dtype = np.dtype(np.float32)
n_bytes = 2 * total_mem_size
n_elems = int(n_bytes / dtype.itemsize)
ary = cuda.managed_array(n_elems, dtype=dtype)
ary.fill(123)
self.assertTrue(all(ary == 123))
driver.device_memset(ary, 0, n_bytes)
self.assertTrue(all(ary == 0))
if __name__ == '__main__':
unittest.main()
|
Add test for managed allocimport numpy as np
from numba.cuda.cudadrv import driver
from numba import cuda
from numba.cuda.testing import unittest, ContextResettingTestCase
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Driver API unsupported in the simulator')
class TestManagedAlloc(ContextResettingTestCase):
def test_managed_alloc_driver(self):
# Verify that we can allocate and operate on managed
# memory through the CUDA driver interface.
n = 32
mem = cuda.current_context().memallocmanaged(n)
dtype = np.dtype(np.uint8)
ary = np.ndarray(shape=n // dtype.itemsize, dtype=dtype,
buffer=mem)
magic = 0xab
driver.device_memset(mem, magic, n)
self.assertTrue(np.all(ary == magic))
def test_managed_alloc_oversubscription(self):
# Verify we can correctly operate on a managed array
# larger than the GPU memory, on both CPU and GPU.
ctx = cuda.current_context()
total_mem_size = ctx.get_memory_info().total
dtype = np.dtype(np.float32)
n_bytes = 2 * total_mem_size
n_elems = int(n_bytes / dtype.itemsize)
ary = cuda.managed_array(n_elems, dtype=dtype)
ary.fill(123)
self.assertTrue(all(ary == 123))
driver.device_memset(ary, 0, n_bytes)
self.assertTrue(all(ary == 0))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test for managed alloc<commit_after>import numpy as np
from numba.cuda.cudadrv import driver
from numba import cuda
from numba.cuda.testing import unittest, ContextResettingTestCase
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('CUDA Driver API unsupported in the simulator')
class TestManagedAlloc(ContextResettingTestCase):
def test_managed_alloc_driver(self):
# Verify that we can allocate and operate on managed
# memory through the CUDA driver interface.
n = 32
mem = cuda.current_context().memallocmanaged(n)
dtype = np.dtype(np.uint8)
ary = np.ndarray(shape=n // dtype.itemsize, dtype=dtype,
buffer=mem)
magic = 0xab
driver.device_memset(mem, magic, n)
self.assertTrue(np.all(ary == magic))
def test_managed_alloc_oversubscription(self):
# Verify we can correctly operate on a managed array
# larger than the GPU memory, on both CPU and GPU.
ctx = cuda.current_context()
total_mem_size = ctx.get_memory_info().total
dtype = np.dtype(np.float32)
n_bytes = 2 * total_mem_size
n_elems = int(n_bytes / dtype.itemsize)
ary = cuda.managed_array(n_elems, dtype=dtype)
ary.fill(123)
self.assertTrue(all(ary == 123))
driver.device_memset(ary, 0, n_bytes)
self.assertTrue(all(ary == 0))
if __name__ == '__main__':
unittest.main()
|
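As a side note, a minimal sketch (not part of the commit) of exercising a managed array from both the host and an actual kernel, the access pattern the test comment alludes to; it assumes a CUDA-capable device.
import numpy as np
from numba import cuda

@cuda.jit
def fill_kernel(arr, value):
    i = cuda.grid(1)
    if i < arr.size:
        arr[i] = value

ary = cuda.managed_array(1024, dtype=np.float32)
ary.fill(7.0)                            # host-side write
fill_kernel.forall(ary.size)(ary, 9.0)   # device-side write
cuda.synchronize()                       # flush device writes before host reads
assert (ary == 9.0).all()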
|
1102db025fd41b91d9cfa1ce93dddd72da47926c
|
py/most-frequent-subtree-sum.py
|
py/most-frequent-subtree-sum.py
|
from collections import Counter
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def top_down(self, cur, c):
if cur:
lsum = self.top_down(cur.left, c)
rsum = self.top_down(cur.right, c)
s = lsum + rsum + cur.val
c[s] += 1
if c[s] > self.freq:
self.freq = c[s]
self.most_freq = [s]
elif c[s] == self.freq:
self.most_freq.append(s)
return s
else:
return 0
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
c = Counter()
self.most_freq = []
self.freq = 0
self.top_down(root, c)
return self.most_freq
|
Add py solution for 508. Most Frequent Subtree Sum
|
Add py solution for 508. Most Frequent Subtree Sum
508. Most Frequent Subtree Sum: https://leetcode.com/problems/most-frequent-subtree-sum/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 508. Most Frequent Subtree Sum
508. Most Frequent Subtree Sum: https://leetcode.com/problems/most-frequent-subtree-sum/
|
from collections import Counter
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def top_down(self, cur, c):
if cur:
lsum = self.top_down(cur.left, c)
rsum = self.top_down(cur.right, c)
s = lsum + rsum + cur.val
c[s] += 1
if c[s] > self.freq:
self.freq = c[s]
self.most_freq = [s]
elif c[s] == self.freq:
self.most_freq.append(s)
return s
else:
return 0
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
c = Counter()
self.most_freq = []
self.freq = 0
self.top_down(root, c)
return self.most_freq
|
<commit_before><commit_msg>Add py solution for 508. Most Frequent Subtree Sum
508. Most Frequent Subtree Sum: https://leetcode.com/problems/most-frequent-subtree-sum/<commit_after>
|
from collections import Counter
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def top_down(self, cur, c):
if cur:
lsum = self.top_down(cur.left, c)
rsum = self.top_down(cur.right, c)
s = lsum + rsum + cur.val
c[s] += 1
if c[s] > self.freq:
self.freq = c[s]
self.most_freq = [s]
elif c[s] == self.freq:
self.most_freq.append(s)
return s
else:
return 0
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
c = Counter()
self.most_freq = []
self.freq = 0
self.top_down(root, c)
return self.most_freq
|
Add py solution for 508. Most Frequent Subtree Sum
508. Most Frequent Subtree Sum: https://leetcode.com/problems/most-frequent-subtree-sum/from collections import Counter
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def top_down(self, cur, c):
if cur:
lsum = self.top_down(cur.left, c)
rsum = self.top_down(cur.right, c)
s = lsum + rsum + cur.val
c[s] += 1
if c[s] > self.freq:
self.freq = c[s]
self.most_freq = [s]
elif c[s] == self.freq:
self.most_freq.append(s)
return s
else:
return 0
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
c = Counter()
self.most_freq = []
self.freq = 0
self.top_down(root, c)
return self.most_freq
|
<commit_before><commit_msg>Add py solution for 508. Most Frequent Subtree Sum
508. Most Frequent Subtree Sum: https://leetcode.com/problems/most-frequent-subtree-sum/<commit_after>from collections import Counter
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def top_down(self, cur, c):
if cur:
lsum = self.top_down(cur.left, c)
rsum = self.top_down(cur.right, c)
s = lsum + rsum + cur.val
c[s] += 1
if c[s] > self.freq:
self.freq = c[s]
self.most_freq = [s]
elif c[s] == self.freq:
self.most_freq.append(s)
return s
else:
return 0
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
c = Counter()
self.most_freq = []
self.freq = 0
self.top_down(root, c)
return self.most_freq
|
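A hypothetical usage sketch mirroring the commented-out TreeNode definition; the tree [5, 2, -3] yields subtree sums 2, -3 and 4, each occurring once, so all three are returned.
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

root = TreeNode(5)
root.left = TreeNode(2)
root.right = TreeNode(-3)
print(Solution().findFrequentTreeSum(root))  # [2, -3, 4]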
|
bf86fcc000c4bfc085bf198afcec9440b065aeeb
|
queen/positioning/__init__.py
|
queen/positioning/__init__.py
|
class Position(object):
"""Represents a postion and orientation in the arena."""
def __init__(self, x, y, r=0):
self.x = x
self.y = y
self.r = r % 360
def same_area(self, position, tolerance=1.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
if self.x - t <= position.x <= self.x + t:
if self.y - t <= position.y <= self.y + t:
return True
# Else
return False
def same_facing(self, position, tolerance=15.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
diff = abs(position.r - self.r)
if diff >= 180:
diff = 360 - diff
if diff <= t:
return True
# Else
return False
|
Add positioning module and Position object.
|
Add positioning module and Position object.
|
Python
|
mit
|
kalail/queen,kalail/queen
|
Add positioning module and Position object.
|
class Position(object):
"""Represents a postion and orientation in the arena."""
def __init__(self, x, y, r=0):
self.x = x
self.y = y
self.r = r % 360
def same_area(self, position, tolerance=1.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
if self.x - t <= position.x <= self.x + t:
if self.y - t <= position.y <= self.y + t:
return True
# Else
return False
def same_facing(self, position, tolerance=15.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
diff = abs(position.r - self.r)
if diff >= 180:
diff = 360 - diff
if diff <= t:
return True
# Else
return False
|
<commit_before><commit_msg>Add positioning module and Position object.<commit_after>
|
class Position(object):
"""Represents a postion and orientation in the arena."""
def __init__(self, x, y, r=0):
self.x = x
self.y = y
self.r = r % 360
def same_area(self, position, tolerance=1.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
if self.x - t <= position.x <= self.x + t:
if self.y - t <= position.y <= self.y + t:
return True
# Else
return False
def same_facing(self, position, tolerance=15.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
diff = abs(position.r - self.r)
if diff >= 180:
diff = 360 - diff
if diff <= t:
return True
# Else
return False
|
Add positioning module and Position object.
class Position(object):
"""Represents a postion and orientation in the arena."""
def __init__(self, x, y, r=0):
self.x = x
self.y = y
self.r = r % 360
def same_area(self, position, tolerance=1.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
if self.x - t <= position.x <= self.x + t:
if self.y - t <= position.y <= self.y + t:
return True
# Else
return False
def same_facing(self, position, tolerance=15.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
diff = abs(position.r - self.r)
if diff >= 180:
diff = 360 - diff
if diff <= t:
return True
# Else
return False
|
<commit_before><commit_msg>Add positioning module and Position object.<commit_after>
class Position(object):
"""Represents a postion and orientation in the arena."""
def __init__(self, x, y, r=0):
self.x = x
self.y = y
self.r = r % 360
def same_area(self, position, tolerance=1.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
if self.x - t <= position.x <= self.x + t:
if self.y - t <= position.y <= self.y + t:
return True
# Else
return False
def same_facing(self, position, tolerance=15.0):
"""Returns whether the position is aproximately equal to the given position within said tolerance."""
t = tolerance
diff = abs(position.r - self.r)
if diff >= 180:
diff = 360 - diff
if diff <= t:
return True
# Else
return False
|
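An assumed usage sketch (not part of the commit): the two positions fall within the default 1.0-unit area tolerance, and the heading check wraps around 360 degrees, so 10 and 355 degrees differ by an effective 15 degrees.
a = Position(0.0, 0.0, r=10)
b = Position(0.5, -0.5, r=355)
assert a.same_area(b)     # |dx| and |dy| are both 0.5 <= 1.0
assert a.same_facing(b)   # abs(355 - 10) = 345 >= 180, so 360 - 345 = 15 <= 15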
|
5bab5673e120e0e4bdb48a55e69bb5ae30e2269e
|
vimiv/image_enhance.py
|
vimiv/image_enhance.py
|
# vim: ft=python fileencoding=utf-8 sw=4 et sts=4
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import Gdk
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Integer value between -127 and 127 to change brightness.
contrast: Integer value between -127 and 127 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
# Work with Cairo surface as this can be transformed from and to bytes
# quickly
surface = Gdk.cairo_surface_create_from_pixbuf(pixbuf, 1, None)
width = surface.get_width()
height = surface.get_height()
data = surface.get_data().tobytes()
# Update plain bytes using C extension
data = _image_enhance.enhance_bc(data, brightness, contrast)
surface = surface.create_for_data(data, surface.get_format(), width,
height, surface.get_stride())
# Create pixbuf from updated surface
return Gdk.pixbuf_get_from_surface(surface, 0, 0, width, height)
|
Add python wrapper for C extension
|
Add python wrapper for C extension
|
Python
|
mit
|
karlch/vimiv,karlch/vimiv,karlch/vimiv
|
Add python wrapper for C extension
|
# vim: ft=python fileencoding=utf-8 sw=4 et sts=4
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import Gdk
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Integer value between -127 and 127 to change brightness.
contrast: Integer value between -127 and 127 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
# Work with Cairo surface as this can be transformed from and to bytes
# quickly
surface = Gdk.cairo_surface_create_from_pixbuf(pixbuf, 1, None)
width = surface.get_width()
height = surface.get_height()
data = surface.get_data().tobytes()
# Update plain bytes using C extension
data = _image_enhance.enhance_bc(data, brightness, contrast)
surface = surface.create_for_data(data, surface.get_format(), width,
height, surface.get_stride())
# Create pixbuf from updated surface
return Gdk.pixbuf_get_from_surface(surface, 0, 0, width, height)
|
<commit_before><commit_msg>Add python wrapper for C extension<commit_after>
|
# vim: ft=python fileencoding=utf-8 sw=4 et sts=4
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import Gdk
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Integer value between -127 and 127 to change brightness.
contrast: Integer value between -127 and 127 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
# Work with Cairo surface as this can be transformed from and to bytes
# quickly
surface = Gdk.cairo_surface_create_from_pixbuf(pixbuf, 1, None)
width = surface.get_width()
height = surface.get_height()
data = surface.get_data().tobytes()
# Update plain bytes using C extension
data = _image_enhance.enhance_bc(data, brightness, contrast)
surface = surface.create_for_data(data, surface.get_format(), width,
height, surface.get_stride())
# Create pixbuf from updated surface
return Gdk.pixbuf_get_from_surface(surface, 0, 0, width, height)
|
Add python wrapper for C extension# vim: ft=python fileencoding=utf-8 sw=4 et sts=4
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import Gdk
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Integer value between -127 and 127 to change brightness.
contrast: Integer value between -127 and 127 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
# Work with Cairo surface as this can be transformed from and to bytes
# quickly
surface = Gdk.cairo_surface_create_from_pixbuf(pixbuf, 1, None)
width = surface.get_width()
height = surface.get_height()
data = surface.get_data().tobytes()
# Update plain bytes using C extension
data = _image_enhance.enhance_bc(data, brightness, contrast)
surface = surface.create_for_data(data, surface.get_format(), width,
height, surface.get_stride())
# Create pixbuf from updated surface
return Gdk.pixbuf_get_from_surface(surface, 0, 0, width, height)
|
<commit_before><commit_msg>Add python wrapper for C extension<commit_after># vim: ft=python fileencoding=utf-8 sw=4 et sts=4
"""Wrapper functions for the _image_enhance C extension."""
from gi.repository import Gdk
from vimiv import _image_enhance
def enhance_bc(pixbuf, brightness, contrast):
"""Enhance brightness and contrast of a GdkPixbuf.Pixbuf.
Args:
pixbuf: Original GdkPixbuf.Pixbuf to work with.
brightness: Integer value between -127 and 127 to change brightness.
contrast: Integer value between -127 and 127 to change contrast.
Return:
The enhanced GdkPixbuf.Pixbuf
"""
# Work with Cairo surface as this can be transformed from and to bytes
# quickly
surface = Gdk.cairo_surface_create_from_pixbuf(pixbuf, 1, None)
width = surface.get_width()
height = surface.get_height()
data = surface.get_data().tobytes()
# Update plain bytes using C extension
data = _image_enhance.enhance_bc(data, brightness, contrast)
surface = surface.create_for_data(data, surface.get_format(), width,
height, surface.get_stride())
# Create pixbuf from updated surface
return Gdk.pixbuf_get_from_surface(surface, 0, 0, width, height)
|
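A hypothetical caller for the wrapper above (assumes a GObject Introspection environment and a photo.jpg on disk); brightness and contrast are integers in [-127, 127] per the docstring.
import gi
gi.require_version('GdkPixbuf', '2.0')
from gi.repository import GdkPixbuf
from vimiv.image_enhance import enhance_bc

pixbuf = GdkPixbuf.Pixbuf.new_from_file('photo.jpg')
brighter = enhance_bc(pixbuf, brightness=40, contrast=0)
brighter.savev('photo_bright.jpg', 'jpeg', [], [])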
|
62a6ce74ec6b27a89e6f903385c0fbd91a8a0db7
|
relay-bot/xrepeat.py
|
relay-bot/xrepeat.py
|
import os,sys,xmpp
currentUser = None
xmppClient = None
nicks = {}
###### Bot Command
def commandNick():
pass
def commandHelp(client, user):
message = """
!help - See this menu.
!nick <new name> - Change nick name.
"""
client.send(xmpp.Message(user, message))
###### Bot Logic
def parseCommand(client, user, message):
if message == '!help':
commandHelp(client, user)
return True
return False
def messageCB(client, message):
text = message.getBody()
user = message.getFrom()
if not parseCommand(client, user, text):
if text is not None:
roster = xmppClient.getRoster()
items = roster.getItems()
for item in items:
jid = xmpp.JID(user)
itemJID = xmpp.JID(item)
sender = jid.getNode()
receiver = itemJID.getNode()
if item <> currentUser and receiver <> sender:
message = "%s: %s"%(jid.getNode(), text)
client.send(xmpp.Message(item, message))
###### Bot initial process
def stepOn(client):
try:
client.Process(1)
except KeyboardInterrupt:
return 0
return 1
def goOn(client):
while stepOn(client):
pass
if len(sys.argv) < 3:
print "Usage: xrepeat.py username@server.net password"
else:
jid = xmpp.JID(sys.argv[1])
user, server, password = jid.getNode(), jid.getDomain(), sys.argv[2]
currentUser = sys.argv[1]
xmppClient = xmpp.Client(server, debug = [])
connectionResource = xmppClient.connect()
if not connectionResource:
print "Unable to connect to server %s!"%server
sys.exit(1)
if connectionResource <> 'tls':
print "Warning: unable to establish secure connection - TLS failed!"
authorizedResource = xmppClient.auth(user, password)
if not authorizedResource:
print "Unable to autorize on %s - check login/password."%server
sys.exit(1)
if authorizedResource <> 'sasl':
print "Warning: unable to perform SASL auth os %s. Old authentication method used!"%server
xmppClient.RegisterHandler('message', messageCB)
xmppClient.sendInitPresence()
print "Repeat bot started"
goOn(xmppClient)
|
Add XMPP relay message bot.
|
Add XMPP relay message bot.
|
Python
|
mit
|
llun/wordpress-authenticator
|
Add XMPP relay message bot.
|
import os,sys,xmpp
currentUser = None
xmppClient = None
nicks = {}
###### Bot Command
def commandNick():
pass
def commandHelp(client, user):
message = """
!help - See this menu.
!nick <new name> - Change nick name.
"""
client.send(xmpp.Message(user, message))
###### Bot Logic
def parseCommand(client, user, message):
if message == '!help':
commandHelp(client, user)
return True
return False
def messageCB(client, message):
text = message.getBody()
user = message.getFrom()
if not parseCommand(client, user, text):
if text is not None:
roster = xmppClient.getRoster()
items = roster.getItems()
for item in items:
jid = xmpp.JID(user)
itemJID = xmpp.JID(item)
sender = jid.getNode()
receiver = itemJID.getNode()
if item <> currentUser and receiver <> sender:
message = "%s: %s"%(jid.getNode(), text)
client.send(xmpp.Message(item, message))
###### Bot initial process
def stepOn(client):
try:
client.Process(1)
except KeyboardInterrupt:
return 0
return 1
def goOn(client):
while stepOn(client):
pass
if len(sys.argv) < 3:
print "Usage: xrepeat.py username@server.net password"
else:
jid = xmpp.JID(sys.argv[1])
user, server, password = jid.getNode(), jid.getDomain(), sys.argv[2]
currentUser = sys.argv[1]
xmppClient = xmpp.Client(server, debug = [])
connectionResource = xmppClient.connect()
if not connectionResource:
print "Unable to connect to server %s!"%server
sys.exit(1)
if connectionResource <> 'tls':
print "Warning: unable to establish secure connection - TLS failed!"
authorizedResource = xmppClient.auth(user, password)
if not authorizedResource:
print "Unable to autorize on %s - check login/password."%server
sys.exit(1)
if authorizedResource <> 'sasl':
print "Warning: unable to perform SASL auth os %s. Old authentication method used!"%server
xmppClient.RegisterHandler('message', messageCB)
xmppClient.sendInitPresence()
print "Repeat bot started"
goOn(xmppClient)
|
<commit_before><commit_msg>Add XMPP relay message bot.<commit_after>
|
import os,sys,xmpp
currentUser = None
xmppClient = None
nicks = {}
###### Bot Command
def commandNick():
pass
def commandHelp(client, user):
message = """
!help - See this menu.
!nick <new name> - Change nick name.
"""
client.send(xmpp.Message(user, message))
###### Bot Logic
def parseCommand(client, user, message):
if message == '!help':
commandHelp(client, user)
return True
return False
def messageCB(client, message):
text = message.getBody()
user = message.getFrom()
if not parseCommand(client, user, text):
if text is not None:
roster = xmppClient.getRoster()
items = roster.getItems()
for item in items:
jid = xmpp.JID(user)
itemJID = xmpp.JID(item)
sender = jid.getNode()
receiver = itemJID.getNode()
if item <> currentUser and receiver <> sender:
message = "%s: %s"%(jid.getNode(), text)
client.send(xmpp.Message(item, message))
###### Bot initial process
def stepOn(client):
try:
client.Process(1)
except KeyboardInterrupt:
return 0
return 1
def goOn(client):
while stepOn(client):
pass
if len(sys.argv) < 3:
print "Usage: xrepeat.py username@server.net password"
else:
jid = xmpp.JID(sys.argv[1])
user, server, password = jid.getNode(), jid.getDomain(), sys.argv[2]
currentUser = sys.argv[1]
xmppClient = xmpp.Client(server, debug = [])
connectionResource = xmppClient.connect()
if not connectionResource:
print "Unable to connect to server %s!"%server
sys.exit(1)
if connectionResource <> 'tls':
print "Warning: unable to establish secure connection - TLS failed!"
authorizedResource = xmppClient.auth(user, password)
if not authorizedResource:
print "Unable to autorize on %s - check login/password."%server
sys.exit(1)
if authorizedResource <> 'sasl':
print "Warning: unable to perform SASL auth os %s. Old authentication method used!"%server
xmppClient.RegisterHandler('message', messageCB)
xmppClient.sendInitPresence()
print "Repeat bot started"
goOn(xmppClient)
|
Add XMPP relay message bot.import os,sys,xmpp
currentUser = None
xmppClient = None
nicks = {}
###### Bot Command
def commandNick():
pass
def commandHelp(client, user):
message = """
!help - See this menu.
!nick <new name> - Change nick name.
"""
client.send(xmpp.Message(user, message))
###### Bot Logic
def parseCommand(client, user, message):
if message == '!help':
commandHelp(client, user)
return True
return False
def messageCB(client, message):
text = message.getBody()
user = message.getFrom()
if not parseCommand(client, user, text):
if text is not None:
roster = xmppClient.getRoster()
items = roster.getItems()
for item in items:
jid = xmpp.JID(user)
itemJID = xmpp.JID(item)
sender = jid.getNode()
receiver = itemJID.getNode()
if item <> currentUser and receiver <> sender:
message = "%s: %s"%(jid.getNode(), text)
client.send(xmpp.Message(item, message))
###### Bot initial process
def stepOn(client):
try:
client.Process(1)
except KeyboardInterrupt:
return 0
return 1
def goOn(client):
while stepOn(client):
pass
if len(sys.argv) < 3:
print "Usage: xrepeat.py username@server.net password"
else:
jid = xmpp.JID(sys.argv[1])
user, server, password = jid.getNode(), jid.getDomain(), sys.argv[2]
currentUser = sys.argv[1]
xmppClient = xmpp.Client(server, debug = [])
connectionResource = xmppClient.connect()
if not connectionResource:
print "Unable to connect to server %s!"%server
sys.exit(1)
if connectionResource <> 'tls':
print "Warning: unable to establish secure connection - TLS failed!"
authorizedResource = xmppClient.auth(user, password)
if not authorizedResource:
print "Unable to autorize on %s - check login/password."%server
sys.exit(1)
if authorizedResource <> 'sasl':
print "Warning: unable to perform SASL auth os %s. Old authentication method used!"%server
xmppClient.RegisterHandler('message', messageCB)
xmppClient.sendInitPresence()
print "Repeat bot started"
goOn(xmppClient)
|
<commit_before><commit_msg>Add XMPP relay message bot.<commit_after>import os,sys,xmpp
currentUser = None
xmppClient = None
nicks = {}
###### Bot Command
def commandNick():
pass
def commandHelp(client, user):
message = """
!help - See this menu.
!nick <new name> - Change nick name.
"""
client.send(xmpp.Message(user, message))
###### Bot Logic
def parseCommand(client, user, message):
if message == '!help':
commandHelp(client, user)
return True
return False
def messageCB(client, message):
text = message.getBody()
user = message.getFrom()
if not parseCommand(client, user, text):
if text is not None:
roster = xmppClient.getRoster()
items = roster.getItems()
for item in items:
jid = xmpp.JID(user)
itemJID = xmpp.JID(item)
sender = jid.getNode()
receiver = itemJID.getNode()
if item <> currentUser and receiver <> sender:
message = "%s: %s"%(jid.getNode(), text)
client.send(xmpp.Message(item, message))
###### Bot initial process
def stepOn(client):
try:
client.Process(1)
except KeyboardInterrupt:
return 0
return 1
def goOn(client):
while stepOn(client):
pass
if len(sys.argv) < 3:
print "Usage: xrepeat.py username@server.net password"
else:
jid = xmpp.JID(sys.argv[1])
user, server, password = jid.getNode(), jid.getDomain(), sys.argv[2]
currentUser = sys.argv[1]
xmppClient = xmpp.Client(server, debug = [])
connectionResource = xmppClient.connect()
if not connectionResource:
print "Unable to connect to server %s!"%server
sys.exit(1)
if connectionResource <> 'tls':
print "Warning: unable to establish secure connection - TLS failed!"
authorizedResource = xmppClient.auth(user, password)
if not authorizedResource:
print "Unable to autorize on %s - check login/password."%server
sys.exit(1)
if authorizedResource <> 'sasl':
print "Warning: unable to perform SASL auth os %s. Old authentication method used!"%server
xmppClient.RegisterHandler('message', messageCB)
xmppClient.sendInitPresence()
print "Repeat bot started"
goOn(xmppClient)
|
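A sketch (pure assumption) of how the unimplemented commandNick stub might fill the module-level nicks dict; xmpppy's JID.getStripped() returns the bare JID used as the key.
def commandNick(client, user, new_nick):
    nicks[xmpp.JID(user).getStripped()] = new_nick
    client.send(xmpp.Message(user, "Nick changed to %s" % new_nick))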
|
1831accb7250b157639f9fc254b33b2976a4f0b7
|
tests/test_plugin.py
|
tests/test_plugin.py
|
from pathlib import Path
from ckanext.nhm.plugin import NHMPlugin
def test_download_modify_email_templates():
plugin = NHMPlugin()
original_plain = 'original plain'
original_html = 'original html'
base = Path(__file__).parent.parent / 'ckanext' / 'nhm' / 'src' / 'download_emails'
with (base / 'body.txt').open() as f:
plain_contents = f.read().strip()
with (base / 'body.html').open() as f:
html_contents = f.read().strip()
plain, html = plugin.download_modify_email_templates(original_plain, original_html)
assert plain != original_plain
assert html != original_html
assert plain == plain_contents
assert html == html_contents
|
Test plugin integration with IVersionedDatastoreDownloads
|
Test plugin integration with IVersionedDatastoreDownloads
|
Python
|
mit
|
NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm
|
Test plugin integration with IVersionedDatastoreDownloads
|
from pathlib import Path
from ckanext.nhm.plugin import NHMPlugin
def test_download_modify_email_templates():
plugin = NHMPlugin()
original_plain = 'original plain'
original_html = 'original html'
base = Path(__file__).parent.parent / 'ckanext' / 'nhm' / 'src' / 'download_emails'
with (base / 'body.txt').open() as f:
plain_contents = f.read().strip()
with (base / 'body.html').open() as f:
html_contents = f.read().strip()
plain, html = plugin.download_modify_email_templates(original_plain, original_html)
assert plain != original_plain
assert html != original_html
assert plain == plain_contents
assert html == html_contents
|
<commit_before><commit_msg>Test plugin integration with IVersionedDatastoreDownloads<commit_after>
|
from pathlib import Path
from ckanext.nhm.plugin import NHMPlugin
def test_download_modify_email_templates():
plugin = NHMPlugin()
original_plain = 'original plain'
original_html = 'original html'
base = Path(__file__).parent.parent / 'ckanext' / 'nhm' / 'src' / 'download_emails'
with (base / 'body.txt').open() as f:
plain_contents = f.read().strip()
with (base / 'body.html').open() as f:
html_contents = f.read().strip()
plain, html = plugin.download_modify_email_templates(original_plain, original_html)
assert plain != original_plain
assert html != original_html
assert plain == plain_contents
assert html == html_contents
|
Test plugin integration with IVersionedDatastoreDownloadsfrom pathlib import Path
from ckanext.nhm.plugin import NHMPlugin
def test_download_modify_email_templates():
plugin = NHMPlugin()
original_plain = 'original plain'
original_html = 'original html'
base = Path(__file__).parent.parent / 'ckanext' / 'nhm' / 'src' / 'download_emails'
with (base / 'body.txt').open() as f:
plain_contents = f.read().strip()
with (base / 'body.html').open() as f:
html_contents = f.read().strip()
plain, html = plugin.download_modify_email_templates(original_plain, original_html)
assert plain != original_plain
assert html != original_html
assert plain == plain_contents
assert html == html_contents
|
<commit_before><commit_msg>Test plugin integration with IVersionedDatastoreDownloads<commit_after>from pathlib import Path
from ckanext.nhm.plugin import NHMPlugin
def test_download_modify_email_templates():
plugin = NHMPlugin()
original_plain = 'original plain'
original_html = 'original html'
base = Path(__file__).parent.parent / 'ckanext' / 'nhm' / 'src' / 'download_emails'
with (base / 'body.txt').open() as f:
plain_contents = f.read().strip()
with (base / 'body.html').open() as f:
html_contents = f.read().strip()
plain, html = plugin.download_modify_email_templates(original_plain, original_html)
assert plain != original_plain
assert html != original_html
assert plain == plain_contents
assert html == html_contents
|
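For context, a sketch of an implementation that would satisfy this test; this is an assumption about the plugin, not its actual source: the hook ignores the passed-in defaults and returns the stripped contents of the bundled templates.
from pathlib import Path

def download_modify_email_templates(self, plain, html):
    base = Path(__file__).parent / 'src' / 'download_emails'
    plain = (base / 'body.txt').read_text().strip()
    html = (base / 'body.html').read_text().strip()
    return plain, html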
|
2c4dad9f2249aafd6aa57faf4d8435ba29f97aa6
|
tests/convergence_tests/spheres_multiple_lspr.py
|
tests/convergence_tests/spheres_multiple_lspr.py
|
import numpy
from convergence_lspr import (run_convergence, picklesave, pickleload,
report_results, richardson_extrapolation_lspr,
mesh_multiple)
def main():
print('{:-^60}'.format('Running sphere_multiple_lspr test'))
try:
test_outputs = pickleload()
except FileNotFoundError:
test_outputs = {}
problem_folder = 'input_files'
# multiple spheres lspr
param = 'sphere_complex.param'
    test_name = 'sphere_multiple_complex'
#In this case we have 1 sphere of radius 10 and 6 of radius 2
R=10.
r=2.
    total_Area = 4 * numpy.pi * (R*R + 6*r*r)
if test_name not in test_outputs.keys():
N, avg_density, iterations, expected_rate, Cext_0, Time = run_convergence(
mesh_multiple, test_name, problem_folder, param, total_Area=total_Area)
test_outputs[test_name] = {'N': N,
'avg_density': avg_density,
'iterations': iterations,
'expected_rate': expected_rate,
'Cext_0': Cext_0,
'Time': Time}
# load data for analysis
N = test_outputs[test_name]['N']
avg_density = test_outputs[test_name]['avg_density']
iterations = test_outputs[test_name]['iterations']
expected_rate = test_outputs[test_name]['expected_rate']
Cext_0 = test_outputs[test_name]['Cext_0']
Time = test_outputs[test_name]['Time']
total_time = Time
#Richardson extrapolation on the main sphere:
rich_extra = richardson_extrapolation_lspr(test_outputs[test_name]['Cext_0'])
error = abs(Cext_0 - rich_extra) / abs(rich_extra)
test_outputs[test_name]['error'] = error
test_outputs[test_name]['rich_extra'] = rich_extra
picklesave(test_outputs)
report_results(error,
N,
expected_rate,
iterations,
Cext_0,
                   total_time,
                   analytical=None,
test_name=test_name,
rich_extra=rich_extra,
avg_density=avg_density)
if __name__ == "__main__":
main()
|
Add convergence test for multiple spheres
|
Add convergence test for multiple spheres
|
Python
|
bsd-3-clause
|
barbagroup/pygbe,barbagroup/pygbe,barbagroup/pygbe
|
Add convergence test for multiple spheres
|
import numpy
from convergence_lspr import (run_convergence, picklesave, pickleload,
report_results, richardson_extrapolation_lspr,
mesh_multiple)
def main():
print('{:-^60}'.format('Running sphere_multiple_lspr test'))
try:
test_outputs = pickleload()
except FileNotFoundError:
test_outputs = {}
problem_folder = 'input_files'
# multiple spheres lspr
param = 'sphere_complex.param'
    test_name = 'sphere_multiple_complex'
#In this case we have 1 sphere of radius 10 and 6 of radius 2
R=10.
r=2.
    total_Area = 4 * numpy.pi * (R*R + 6*r*r)
if test_name not in test_outputs.keys():
N, avg_density, iterations, expected_rate, Cext_0, Time = run_convergence(
mesh_multiple, test_name, problem_folder, param, total_Area=total_Area)
test_outputs[test_name] = {'N': N,
'avg_density': avg_density,
'iterations': iterations,
'expected_rate': expected_rate,
'Cext_0': Cext_0,
'Time': Time}
# load data for analysis
N = test_outputs[test_name]['N']
avg_density = test_outputs[test_name]['avg_density']
iterations = test_outputs[test_name]['iterations']
expected_rate = test_outputs[test_name]['expected_rate']
Cext_0 = test_outputs[test_name]['Cext_0']
Time = test_outputs[test_name]['Time']
total_time = Time
#Richardson extrapolation on the main sphere:
rich_extra = richardson_extrapolation_lspr(test_outputs[test_name]['Cext_0'])
error = abs(Cext_0 - rich_extra) / abs(rich_extra)
test_outputs[test_name]['error'] = error
test_outputs[test_name]['rich_extra'] = rich_extra
picklesave(test_outputs)
report_results(error,
N,
expected_rate,
iterations,
Cext_0,
                   total_time,
                   analytical=None,
test_name=test_name,
rich_extra=rich_extra,
avg_density=avg_density)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add convergence test for multiple spheres<commit_after>
|
import numpy
from convergence_lspr import (run_convergence, picklesave, pickleload,
report_results, richardson_extrapolation_lspr,
mesh_multiple)
def main():
print('{:-^60}'.format('Running sphere_multiple_lspr test'))
try:
test_outputs = pickleload()
except FileNotFoundError:
test_outputs = {}
problem_folder = 'input_files'
# multiple spheres lspr
param = 'sphere_complex.param'
    test_name = 'sphere_multiple_complex'
#In this case we have 1 sphere of radius 10 and 6 of radius 2
R=10.
r=2.
    total_Area = 4 * numpy.pi * (R*R + 6*r*r)
if test_name not in test_outputs.keys():
N, avg_density, iterations, expected_rate, Cext_0, Time = run_convergence(
mesh_multiple, test_name, problem_folder, param, total_Area=total_Area)
test_outputs[test_name] = {'N': N,
'avg_density': avg_density,
'iterations': iterations,
'expected_rate': expected_rate,
'Cext_0': Cext_0,
'Time': Time}
# load data for analysis
N = test_outputs[test_name]['N']
avg_density = test_outputs[test_name]['avg_density']
iterations = test_outputs[test_name]['iterations']
expected_rate = test_outputs[test_name]['expected_rate']
Cext_0 = test_outputs[test_name]['Cext_0']
Time = test_outputs[test_name]['Time']
total_time = Time
#Richardson extrapolation on the main sphere:
rich_extra = richardson_extrapolation_lspr(test_outputs[test_name]['Cext_0'])
error = abs(Cext_0 - rich_extra) / abs(rich_extra)
test_outputs[test_name]['error'] = error
test_outputs[test_name]['rich_extra'] = rich_extra
picklesave(test_outputs)
report_results(error,
N,
expected_rate,
iterations,
Cext_0,
                   total_time,
                   analytical=None,
test_name=test_name,
rich_extra=rich_extra,
avg_density=avg_density)
if __name__ == "__main__":
main()
|
Add convergence test for multiple spheresimport numpy
from convergence_lspr import (run_convergence, picklesave, pickleload,
report_results, richardson_extrapolation_lspr,
mesh_multiple)
def main():
print('{:-^60}'.format('Running sphere_multiple_lspr test'))
try:
test_outputs = pickleload()
except FileNotFoundError:
test_outputs = {}
problem_folder = 'input_files'
# multiple spheres lspr
param = 'sphere_complex.param'
    test_name = 'sphere_multiple_complex'
#In this case we have 1 sphere of radius 10 and 6 of radius 2
R=10.
r=2.
    total_Area = 4 * numpy.pi * (R*R + 6*r*r)
if test_name not in test_outputs.keys():
N, avg_density, iterations, expected_rate, Cext_0, Time = run_convergence(
mesh_multiple, test_name, problem_folder, param, total_Area=total_Area)
test_outputs[test_name] = {'N': N,
'avg_density': avg_density,
'iterations': iterations,
'expected_rate': expected_rate,
'Cext_0': Cext_0,
'Time': Time}
# load data for analysis
N = test_outputs[test_name]['N']
avg_density = test_outputs[test_name]['avg_density']
iterations = test_outputs[test_name]['iterations']
expected_rate = test_outputs[test_name]['expected_rate']
Cext_0 = test_outputs[test_name]['Cext_0']
Time = test_outputs[test_name]['Time']
total_time = Time
#Richardson extrapolation on the main sphere:
rich_extra = richardson_extrapolation_lspr(test_outputs[test_name]['Cext_0'])
error = abs(Cext_0 - rich_extra) / abs(rich_extra)
test_outputs[test_name]['error'] = error
test_outputs[test_name]['rich_extra'] = rich_extra
picklesave(test_outputs)
report_results(error,
N,
expected_rate,
iterations,
Cext_0,
                   total_time,
                   analytical=None,
test_name=test_name,
rich_extra=rich_extra,
avg_density=avg_density)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add convergence test for multiple spheres<commit_after>import numpy
from convergence_lspr import (run_convergence, picklesave, pickleload,
report_results, richardson_extrapolation_lspr,
mesh_multiple)
def main():
print('{:-^60}'.format('Running sphere_multiple_lspr test'))
try:
test_outputs = pickleload()
except FileNotFoundError:
test_outputs = {}
problem_folder = 'input_files'
# multiple spheres lspr
param = 'sphere_complex.param'
    test_name = 'sphere_multiple_complex'
#In this case we have 1 sphere of radius 10 and 6 of radius 2
R=10.
r=2.
    total_Area = 4 * numpy.pi * (R*R + 6*r*r)
if test_name not in test_outputs.keys():
N, avg_density, iterations, expected_rate, Cext_0, Time = run_convergence(
mesh_multiple, test_name, problem_folder, param, total_Area=total_Area)
test_outputs[test_name] = {'N': N,
'avg_density': avg_density,
'iterations': iterations,
'expected_rate': expected_rate,
'Cext_0': Cext_0,
'Time': Time}
# load data for analysis
N = test_outputs[test_name]['N']
avg_density = test_outputs[test_name]['avg_density']
iterations = test_outputs[test_name]['iterations']
expected_rate = test_outputs[test_name]['expected_rate']
Cext_0 = test_outputs[test_name]['Cext_0']
Time = test_outputs[test_name]['Time']
total_time = Time
#Richardson extrapolation on the main sphere:
rich_extra = richardson_extrapolation_lspr(test_outputs[test_name]['Cext_0'])
error = abs(Cext_0 - rich_extra) / abs(rich_extra)
test_outputs[test_name]['error'] = error
test_outputs[test_name]['rich_extra'] = rich_extra
picklesave(test_outputs)
report_results(error,
N,
expected_rate,
iterations,
Cext_0,
                   total_time,
                   analytical=None,
test_name=test_name,
rich_extra=rich_extra,
avg_density=avg_density)
if __name__ == "__main__":
main()
|
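For reference, classic three-point Richardson extrapolation, which is presumably what richardson_extrapolation_lspr computes over the Cext_0 series; the constant refinement ratio r and the helper's internals are assumptions.
import numpy

def richardson(f_coarse, f_medium, f_fine, r=2.0):
    # observed order of convergence, then extrapolate past the finest mesh
    p = numpy.log((f_medium - f_coarse) / (f_fine - f_medium)) / numpy.log(r)
    return f_fine + (f_fine - f_medium) / (r**p - 1.0)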
|
4c34cb983b0ef009472bdd00cdb24ba9588161c0
|
fileconv/tests/strip_js_comments.py
|
fileconv/tests/strip_js_comments.py
|
import re
reexpr = r"""
( # Capture code
(?:
"(?:\\.|[^"\\])*" # String literal
|
'(?:\\.|[^'\\])*' # String literal
|
(?:[^/\n"']|/[^/*\n"'])+ # Any code besides newlines or string literals (essentially no comments)
|
\n # Newline
)+ # Repeat
)|
(/\* (?:[^*]|\*[^/])* \*/) # Multi-line comment
|
(?://(.*)$) # Comment
"""
rx = re.compile(reexpr, re.VERBOSE + re.MULTILINE)
# This regex matches with three different subgroups. One for code and two for comment contents. Below is an example of how to extract those.
code = r"""// this is a comment
var x = 2 * 4 // and this is a comment too
var url = "http://www.google.com/" // and "this" too
url += 'but // this is not a comment' // however this one is
url += 'this "is not a comment' + " and ' neither is this //" // only this
bar = 'http://no.comments.com/' // these // are // comments
bar = 'text // string \' no // more //\\' // comments
bar = 'http://no.comments.com/' /*
multiline */
bar = /var/ // comment
/* comment 1 */
bar = open() /* comment 2 */
bar = open() /* comment 2b */// another comment
bar = open( /* comment 3 */ file) // another comment
"""
parts = rx.findall(code)
print('*' * 80, '\nCode:\n\n', ''.join(x[0].strip(' ') for x in parts))
print('*' * 80, '\nMulti line comments:\n\n', '\n'.join(x[1] for x in parts if x[1].strip()))
print('*' * 80, '\nOne line comments:\n\n', '\n'.join(x[2] for x in parts if x[2].strip()))
|
Add a (weird) test file for testing the javascript comment stripping
|
Add a (weird) test file for testing the javascript comment stripping
I had this on my drive anyway so why not just add it to the repo.
|
Python
|
mit
|
SublimeText/AAAPackageDev,SublimeText/AAAPackageDev,SublimeText/PackageDev
|
Add a (weird) test file for testing the javascript comment stripping
I had this on my drive anyway so why not just add it to the repo.
|
import re
reexpr = r"""
( # Capture code
(?:
"(?:\\.|[^"\\])*" # String literal
|
'(?:\\.|[^'\\])*' # String literal
|
(?:[^/\n"']|/[^/*\n"'])+ # Any code besides newlines or string literals (essentially no comments)
|
\n # Newline
)+ # Repeat
)|
(/\* (?:[^*]|\*[^/])* \*/) # Multi-line comment
|
(?://(.*)$) # Comment
"""
rx = re.compile(reexpr, re.VERBOSE + re.MULTILINE)
# This regex matches with three different subgroups. One for code and two for comment contents. Below is an example of how to extract those.
code = r"""// this is a comment
var x = 2 * 4 // and this is a comment too
var url = "http://www.google.com/" // and "this" too
url += 'but // this is not a comment' // however this one is
url += 'this "is not a comment' + " and ' neither is this //" // only this
bar = 'http://no.comments.com/' // these // are // comments
bar = 'text // string \' no // more //\\' // comments
bar = 'http://no.comments.com/' /*
multiline */
bar = /var/ // comment
/* comment 1 */
bar = open() /* comment 2 */
bar = open() /* comment 2b */// another comment
bar = open( /* comment 3 */ file) // another comment
"""
parts = rx.findall(code)
print('*' * 80, '\nCode:\n\n', ''.join(x[0].strip(' ') for x in parts))
print('*' * 80, '\nMulti line comments:\n\n', '\n'.join(x[1] for x in parts if x[1].strip()))
print('*' * 80, '\nOne line comments:\n\n', '\n'.join(x[2] for x in parts if x[2].strip()))
|
<commit_before><commit_msg>Add a (weird) test file for testing the javascript comment stripping
I had this on my drive anyway so why not just add it to the repo.<commit_after>
|
import re
reexpr = r"""
( # Capture code
(?:
"(?:\\.|[^"\\])*" # String literal
|
'(?:\\.|[^'\\])*' # String literal
|
(?:[^/\n"']|/[^/*\n"'])+ # Any code besides newlines or string literals (essentially no comments)
|
\n # Newline
)+ # Repeat
)|
(/\* (?:[^*]|\*[^/])* \*/) # Multi-line comment
|
(?://(.*)$) # Comment
"""
rx = re.compile(reexpr, re.VERBOSE + re.MULTILINE)
# This regex matches with three different subgroups. One for code and two for comment contents. Below is an example of how to extract those.
code = r"""// this is a comment
var x = 2 * 4 // and this is a comment too
var url = "http://www.google.com/" // and "this" too
url += 'but // this is not a comment' // however this one is
url += 'this "is not a comment' + " and ' neither is this //" // only this
bar = 'http://no.comments.com/' // these // are // comments
bar = 'text // string \' no // more //\\' // comments
bar = 'http://no.comments.com/' /*
multiline */
bar = /var/ // comment
/* comment 1 */
bar = open() /* comment 2 */
bar = open() /* comment 2b */// another comment
bar = open( /* comment 3 */ file) // another comment
"""
parts = rx.findall(code)
print('*' * 80, '\nCode:\n\n', ''.join(x[0].strip(' ') for x in parts))
print('*' * 80, '\nMulti line comments:\n\n', '\n'.join(x[1] for x in parts if x[1].strip()))
print('*' * 80, '\nOne line comments:\n\n', '\n'.join(x[2] for x in parts if x[2].strip()))
|
Add a (weird) test file for testing the javascript comment stripping
I had this on my drive anyway so why not just add it to the repo.import re
reexpr = r"""
( # Capture code
(?:
"(?:\\.|[^"\\])*" # String literal
|
'(?:\\.|[^'\\])*' # String literal
|
(?:[^/\n"']|/[^/*\n"'])+ # Any code besides newlines or string literals (essentially no comments)
|
\n # Newline
)+ # Repeat
)|
(/\* (?:[^*]|\*[^/])* \*/) # Multi-line comment
|
(?://(.*)$) # Comment
"""
rx = re.compile(reexpr, re.VERBOSE + re.MULTILINE)
# This regex matches with three different subgroups. One for code and two for comment contents. Below is an example of how to extract those.
code = r"""// this is a comment
var x = 2 * 4 // and this is a comment too
var url = "http://www.google.com/" // and "this" too
url += 'but // this is not a comment' // however this one is
url += 'this "is not a comment' + " and ' neither is this //" // only this
bar = 'http://no.comments.com/' // these // are // comments
bar = 'text // string \' no // more //\\' // comments
bar = 'http://no.comments.com/' /*
multiline */
bar = /var/ // comment
/* comment 1 */
bar = open() /* comment 2 */
bar = open() /* comment 2b */// another comment
bar = open( /* comment 3 */ file) // another comment
"""
parts = rx.findall(code)
print('*' * 80, '\nCode:\n\n', ''.join(x[0].strip(' ') for x in parts))
print('*' * 80, '\nMulti line comments:\n\n', '\n'.join(x[1] for x in parts if x[1].strip()))
print('*' * 80, '\nOne line comments:\n\n', '\n'.join(x[2] for x in parts if x[2].strip()))
|
<commit_before><commit_msg>Add a (weird) test file for testing the javascript comment stripping
I had this on my drive anyway so why not just add it to the repo.<commit_after>import re
reexpr = r"""
( # Capture code
(?:
"(?:\\.|[^"\\])*" # String literal
|
'(?:\\.|[^'\\])*' # String literal
|
(?:[^/\n"']|/[^/*\n"'])+ # Any code besides newlines or string literals (essentially no comments)
|
\n # Newline
)+ # Repeat
)|
(/\* (?:[^*]|\*[^/])* \*/) # Multi-line comment
|
(?://(.*)$) # Comment
"""
rx = re.compile(reexpr, re.VERBOSE + re.MULTILINE)
# This regex matches with three different subgroups. One for code and two for comment contents. Below is an example of how to extract those.
code = r"""// this is a comment
var x = 2 * 4 // and this is a comment too
var url = "http://www.google.com/" // and "this" too
url += 'but // this is not a comment' // however this one is
url += 'this "is not a comment' + " and ' neither is this //" // only this
bar = 'http://no.comments.com/' // these // are // comments
bar = 'text // string \' no // more //\\' // comments
bar = 'http://no.comments.com/' /*
multiline */
bar = /var/ // comment
/* comment 1 */
bar = open() /* comment 2 */
bar = open() /* comment 2b */// another comment
bar = open( /* comment 3 */ file) // another comment
"""
parts = rx.findall(code)
print('*' * 80, '\nCode:\n\n', ''.join(x[0].strip(' ') for x in parts))
print('*' * 80, '\nMulti line comments:\n\n', '\n'.join(x[1] for x in parts if x[1].strip()))
print('*' * 80, '\nOne line comments:\n\n', '\n'.join(x[2] for x in parts if x[2].strip()))
|
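A usage sketch applying the same rx to strip comments from a file on disk (the script.js filename is assumed); only the code subgroup is kept.
with open('script.js') as fh:
    code_only = ''.join(match[0] for match in rx.findall(fh.read()))
print(code_only)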
|
84c581b61eecc4599bce5bff66276f94a8aa31d7
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.MD').read(),
)
|
from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.md').read(),
)
|
Make README file extension lowercase so build will pass on case sensitive systems
|
Make README file extension lowercase so build will pass on case sensitive systems
|
Python
|
mit
|
Fire30/pyxdevkit
|
from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.MD').read(),
)Make README file extension lowercase so build will pass on case sensitive systems
|
from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.md').read(),
)
|
<commit_before>from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.MD').read(),
)<commit_msg>Make README file extension lowercase so build will pass on case sensitive systems<commit_after>
|
from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.md').read(),
)
|
from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.MD').read(),
)Make README file extension lowercase so build will pass on case sensitive systemsfrom distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.md').read(),
)
|
<commit_before>from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.MD').read(),
)<commit_msg>Make README file extension lowercase so build will pass on case sensitive systems<commit_after>from distutils.core import setup
setup(
name='pyxdevkit',
version='0.1dev',
packages=['pyxdevkit',],
license='MIT',
author='T.J. Corley',
author_email='tjcorley30@hotmail.com',
description='xdevkit for python.',
long_description=open('README.md').read(),
)
|
e6d081d6b13ddd0c3384a942e431e630f9805bde
|
ci/download_appveyor.py
|
ci/download_appveyor.py
|
#!/usr/bin/env python
# Downloads AppVeyor artifacts to the local directory.
#
# This script is a bit hacky. But it gets the job done.
import sys
import requests
def make_request(session, path):
url = 'https://ci.appveyor.com/api/%s' % path
return session.get(url)
def download_artifacts(project):
session = requests.session()
project_info = make_request(session, 'projects/%s' % project)
jobs = project_info.json()['build']['jobs']
for job in jobs:
print(job['name'])
if not job['artifactsCount']:
continue
artifacts = make_request(session, 'buildjobs/%s/artifacts' % job['jobId'])
for artifact in artifacts.json():
print('downloading %s' % artifact['fileName'])
response = make_request(session, 'buildjobs/%s/artifacts/%s' % (
job['jobId'], artifact['fileName']))
if response.status_code != 200:
continue
with open(artifact['fileName'], 'wb') as fh:
for chunk in response.iter_content(8192):
fh.write(chunk)
if __name__ == "__main__":
download_artifacts(sys.argv[1])
|
Add script to download AppVeyor artifacts
|
Add script to download AppVeyor artifacts
|
Python
|
bsd-3-clause
|
terrelln/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard
|
Add script to download AppVeyor artifacts
|
#!/usr/bin/env python
# Downloads AppVeyor artifacts to the local directory.
#
# This script is a bit hacky. But it gets the job done.
import sys
import requests
def make_request(session, path):
url = 'https://ci.appveyor.com/api/%s' % path
return session.get(url)
def download_artifacts(project):
session = requests.session()
project_info = make_request(session, 'projects/%s' % project)
jobs = project_info.json()['build']['jobs']
for job in jobs:
print(job['name'])
if not job['artifactsCount']:
continue
artifacts = make_request(session, 'buildjobs/%s/artifacts' % job['jobId'])
for artifact in artifacts.json():
print('downloading %s' % artifact['fileName'])
response = make_request(session, 'buildjobs/%s/artifacts/%s' % (
job['jobId'], artifact['fileName']))
if response.status_code != 200:
continue
with open(artifact['fileName'], 'wb') as fh:
for chunk in response.iter_content(8192):
fh.write(chunk)
if __name__ == "__main__":
download_artifacts(sys.argv[1])
|
<commit_before><commit_msg>Add script to download AppVeyor artifacts<commit_after>
|
#!/usr/bin/env python
# Downloads AppVeyor artifacts to the local directory.
#
# This script is a bit hacky. But it gets the job done.
import sys
import requests
def make_request(session, path):
url = 'https://ci.appveyor.com/api/%s' % path
return session.get(url)
def download_artifacts(project):
session = requests.session()
project_info = make_request(session, 'projects/%s' % project)
jobs = project_info.json()['build']['jobs']
for job in jobs:
print(job['name'])
if not job['artifactsCount']:
continue
artifacts = make_request(session, 'buildjobs/%s/artifacts' % job['jobId'])
for artifact in artifacts.json():
print('downloading %s' % artifact['fileName'])
response = make_request(session, 'buildjobs/%s/artifacts/%s' % (
job['jobId'], artifact['fileName']))
if response.status_code != 200:
continue
with open(artifact['fileName'], 'wb') as fh:
for chunk in response.iter_content(8192):
fh.write(chunk)
if __name__ == "__main__":
download_artifacts(sys.argv[1])
|
Add script to download AppVeyor artifacts#!/usr/bin/env python
# Downloads AppVeyor artifacts to the local directory.
#
# This script is a bit hacky. But it gets the job done.
import sys
import requests
def make_request(session, path):
url = 'https://ci.appveyor.com/api/%s' % path
return session.get(url)
def download_artifacts(project):
session = requests.session()
project_info = make_request(session, 'projects/%s' % project)
jobs = project_info.json()['build']['jobs']
for job in jobs:
print(job['name'])
if not job['artifactsCount']:
continue
artifacts = make_request(session, 'buildjobs/%s/artifacts' % job['jobId'])
for artifact in artifacts.json():
print('downloading %s' % artifact['fileName'])
response = make_request(session, 'buildjobs/%s/artifacts/%s' % (
job['jobId'], artifact['fileName']))
if response.status_code != 200:
continue
with open(artifact['fileName'], 'wb') as fh:
for chunk in response.iter_content(8192):
fh.write(chunk)
if __name__ == "__main__":
download_artifacts(sys.argv[1])
|
<commit_before><commit_msg>Add script to download AppVeyor artifacts<commit_after>#!/usr/bin/env python
# Downloads AppVeyor artifacts to the local directory.
#
# This script is a bit hacky. But it gets the job done.
import sys
import requests
def make_request(session, path):
url = 'https://ci.appveyor.com/api/%s' % path
return session.get(url)
def download_artifacts(project):
session = requests.session()
project_info = make_request(session, 'projects/%s' % project)
jobs = project_info.json()['build']['jobs']
for job in jobs:
print(job['name'])
if not job['artifactsCount']:
continue
artifacts = make_request(session, 'buildjobs/%s/artifacts' % job['jobId'])
for artifact in artifacts.json():
print('downloading %s' % artifact['fileName'])
response = make_request(session, 'buildjobs/%s/artifacts/%s' % (
job['jobId'], artifact['fileName']))
if response.status_code != 200:
continue
with open(artifact['fileName'], 'wb') as fh:
for chunk in response.iter_content(8192):
fh.write(chunk)
if __name__ == "__main__":
download_artifacts(sys.argv[1])
|
|
f543c113f86773f95e30920e1837764f9c5972a0
|
sympy/core/tests/test_noncommutative.py
|
sympy/core/tests/test_noncommutative.py
|
"""Tests for noncommutative symbols and expressions."""
from sympy import (
conjugate,
expand,
factor,
radsimp,
simplify,
symbols,
I,
)
from sympy.abc import x, y, z
A, B, C = symbols("A B C", commutative=False)
X, Y = symbols("X Y", commutative=False, real=True)
Z = X + I*Y
def test_complex():
assert Z.conjugate() == X - I*Y
assert (Z*Z.conjugate()).expand() == X**2 + Y**2 + I*Y*X - I*X*Y
def test_conjugate():
assert conjugate(A).is_commutative == False
assert (A*A).conjugate() == conjugate(A)**2
assert (A*B).conjugate() == conjugate(A)*conjugate(B)
assert (A*B**2).conjugate() == conjugate(A)*conjugate(B)**2
assert (A*B - B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
assert (A*B).conjugate() - (B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
def test_expand():
assert expand((A*B)**2) == A*B*A*B
assert expand(A*B - B*A) == A*B - B*A
assert expand((A*B/A)**2) == A*B*B/A
assert expand(B*A*(A + B)*B) == B*A**2*B + B*A*B**2
assert expand(B*A*(A + C)*B) == B*A**2*B + B*A*C*B
def test_factor():
assert factor(A*B - B*A) == A*B - B*A
def test_radsimp():
assert radsimp(A*B - B*A) == A*B - B*A
def test_simplify():
assert simplify(A*B - B*A) == A*B - B*A
def test_subs():
assert (x*y*A).subs(x*y, z) == A*z
assert (x*A*B).subs(x*A, C) == C*B
assert (x*A*x*x).subs(x**2*A, C) == x*C
assert (x*A*x*B).subs(x**2*A, C) == C*B
assert (A**2*B**2).subs(A*B**2, C) == A*C
assert (A*A*A + A*B*A).subs(A*A*A, C) == C + A*B*A
|
Add tests for noncommutative symbols.
|
Add tests for noncommutative symbols.
- complex
- conjugate
- expand
- factor
- radsimp
- simplify
- subs
|
Python
|
bsd-3-clause
|
Sumith1896/sympy,chaffra/sympy,maniteja123/sympy,pandeyadarsh/sympy,kumarkrishna/sympy,sahmed95/sympy,lidavidm/sympy,mcdaniel67/sympy,ga7g08/sympy,postvakje/sympy,garvitr/sympy,iamutkarshtiwari/sympy,dqnykamp/sympy,atreyv/sympy,postvakje/sympy,MridulS/sympy,chaffra/sympy,souravsingh/sympy,moble/sympy,cccfran/sympy,shikil/sympy,diofant/diofant,Arafatk/sympy,ChristinaZografou/sympy,hargup/sympy,atsao72/sympy,ga7g08/sympy,hrashk/sympy,Shaswat27/sympy,kaichogami/sympy,debugger22/sympy,shikil/sympy,sunny94/temp,sunny94/temp,garvitr/sympy,meghana1995/sympy,lidavidm/sympy,jbbskinny/sympy,jbbskinny/sympy,pandeyadarsh/sympy,Designist/sympy,AkademieOlympia/sympy,grevutiu-gabriel/sympy,atsao72/sympy,Gadal/sympy,pbrady/sympy,wanglongqi/sympy,Titan-C/sympy,kaushik94/sympy,bukzor/sympy,iamutkarshtiwari/sympy,sahilshekhawat/sympy,kumarkrishna/sympy,aktech/sympy,jamesblunt/sympy,kmacinnis/sympy,cswiercz/sympy,VaibhavAgarwalVA/sympy,kaichogami/sympy,atreyv/sympy,wanglongqi/sympy,souravsingh/sympy,ga7g08/sympy,hrashk/sympy,vipulroxx/sympy,jamesblunt/sympy,farhaanbukhsh/sympy,Designist/sympy,debugger22/sympy,cswiercz/sympy,AkademieOlympia/sympy,Davidjohnwilson/sympy,abloomston/sympy,pbrady/sympy,Gadal/sympy,MechCoder/sympy,farhaanbukhsh/sympy,hargup/sympy,maniteja123/sympy,atsao72/sympy,ga7g08/sympy,souravsingh/sympy,sahmed95/sympy,sahilshekhawat/sympy,kumarkrishna/sympy,aktech/sympy,pandeyadarsh/sympy,kumarkrishna/sympy,Sumith1896/sympy,madan96/sympy,atreyv/sympy,postvakje/sympy,garvitr/sympy,iamutkarshtiwari/sympy,madan96/sympy,mcdaniel67/sympy,ga7g08/sympy,Sumith1896/sympy,chaffra/sympy,souravsingh/sympy,moble/sympy,cccfran/sympy,MridulS/sympy,drufat/sympy,lidavidm/sympy,jbbskinny/sympy,Designist/sympy,AkademieOlympia/sympy,grevutiu-gabriel/sympy,Gadal/sympy,pbrady/sympy,wanglongqi/sympy,Titan-C/sympy,kaushik94/sympy,bukzor/sympy,Shaswat27/sympy,kaichogami/sympy,debugger22/sympy,Vishluck/sympy,shikil/sympy,sunny94/temp,meghana1995/sympy,VaibhavAgarwalVA/sympy,atsao72/sympy,hrashk/sympy,vipulroxx/sympy,Davidjohnwilson/sympy,abloomston/sympy,MechCoder/sympy,skirpichev/omg,hargup/sympy,Arafatk/sympy,dqnykamp/sympy,beni55/sympy,liangjiaxing/sympy,kmacinnis/sympy,AunShiLord/sympy,ChristinaZografou/sympy,sampadsaha5/sympy,ahhda/sympy,shipci/sympy,wyom/sympy,mafiya69/sympy,Mitchkoens/sympy,asm666/sympy,jerli/sympy,skidzo/sympy,maniteja123/sympy,lindsayad/sympy,kevalds51/sympy,Curious72/sympy,abhiii5459/sympy,jaimahajan1997/sympy,oliverlee/sympy,yukoba/sympy,toolforger/sympy,saurabhjn76/sympy,cswiercz/sympy,emon10005/sympy,yashsharan/sympy,amitjamadagni/sympy,flacjacket/sympy,srjoglekar246/sympy,jerli/sympy,saurabhjn76/sympy,rahuldan/sympy,moble/sympy,emon10005/sympy,kevalds51/sympy,Curious72/sympy,cswiercz/sympy,wyom/sympy,meghana1995/sympy,pbrady/sympy,sampadsaha5/sympy,farhaanbukhsh/sympy,sahmed95/sympy,aktech/sympy,toolforger/sympy,yukoba/sympy,dqnykamp/sympy
|
Add tests for noncommutative symbols.
- complex
- conjugate
- expand
- factor
- radsimp
- simplify
- subs
|
"""Tests for noncommutative symbols and expressions."""
from sympy import (
conjugate,
expand,
factor,
radsimp,
simplify,
symbols,
I,
)
from sympy.abc import x, y, z
A, B, C = symbols("A B C", commutative=False)
X, Y = symbols("X Y", commutative=False, real=True)
Z = X + I*Y
def test_complex():
assert Z.conjugate() == X - I*Y
assert (Z*Z.conjugate()).expand() == X**2 + Y**2 + I*Y*X - I*X*Y
def test_conjugate():
assert conjugate(A).is_commutative == False
assert (A*A).conjugate() == conjugate(A)**2
assert (A*B).conjugate() == conjugate(A)*conjugate(B)
assert (A*B**2).conjugate() == conjugate(A)*conjugate(B)**2
assert (A*B - B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
assert (A*B).conjugate() - (B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
def test_expand():
assert expand((A*B)**2) == A*B*A*B
assert expand(A*B - B*A) == A*B - B*A
assert expand((A*B/A)**2) == A*B*B/A
assert expand(B*A*(A + B)*B) == B*A**2*B + B*A*B**2
assert expand(B*A*(A + C)*B) == B*A**2*B + B*A*C*B
def test_factor():
assert factor(A*B - B*A) == A*B - B*A
def test_radsimp():
assert radsimp(A*B - B*A) == A*B - B*A
def test_simplify():
assert simplify(A*B - B*A) == A*B - B*A
def test_subs():
assert (x*y*A).subs(x*y, z) == A*z
assert (x*A*B).subs(x*A, C) == C*B
assert (x*A*x*x).subs(x**2*A, C) == x*C
assert (x*A*x*B).subs(x**2*A, C) == C*B
assert (A**2*B**2).subs(A*B**2, C) == A*C
assert (A*A*A + A*B*A).subs(A*A*A, C) == C + A*B*A
|
<commit_before><commit_msg>Add tests for noncommutative symbols.
- complex
- conjugate
- expand
- factor
- radsimp
- simplify
- subs<commit_after>
|
"""Tests for noncommutative symbols and expressions."""
from sympy import (
conjugate,
expand,
factor,
radsimp,
simplify,
symbols,
I,
)
from sympy.abc import x, y, z
A, B, C = symbols("A B C", commutative=False)
X, Y = symbols("X Y", commutative=False, real=True)
Z = X + I*Y
def test_complex():
assert Z.conjugate() == X - I*Y
assert (Z*Z.conjugate()).expand() == X**2 + Y**2 + I*Y*X - I*X*Y
def test_conjugate():
assert conjugate(A).is_commutative == False
assert (A*A).conjugate() == conjugate(A)**2
assert (A*B).conjugate() == conjugate(A)*conjugate(B)
assert (A*B**2).conjugate() == conjugate(A)*conjugate(B)**2
assert (A*B - B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
assert (A*B).conjugate() - (B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
def test_expand():
assert expand((A*B)**2) == A*B*A*B
assert expand(A*B - B*A) == A*B - B*A
assert expand((A*B/A)**2) == A*B*B/A
assert expand(B*A*(A + B)*B) == B*A**2*B + B*A*B**2
assert expand(B*A*(A + C)*B) == B*A**2*B + B*A*C*B
def test_factor():
assert factor(A*B - B*A) == A*B - B*A
def test_radsimp():
assert radsimp(A*B - B*A) == A*B - B*A
def test_simplify():
assert simplify(A*B - B*A) == A*B - B*A
def test_subs():
assert (x*y*A).subs(x*y, z) == A*z
assert (x*A*B).subs(x*A, C) == C*B
assert (x*A*x*x).subs(x**2*A, C) == x*C
assert (x*A*x*B).subs(x**2*A, C) == C*B
assert (A**2*B**2).subs(A*B**2, C) == A*C
assert (A*A*A + A*B*A).subs(A*A*A, C) == C + A*B*A
|
Add tests for noncommutative symbols.
- complex
- conjugate
- expand
- factor
- radsimp
- simplify
- subs"""Tests for noncommutative symbols and expressions."""
from sympy import (
conjugate,
expand,
factor,
radsimp,
simplify,
symbols,
I,
)
from sympy.abc import x, y, z
A, B, C = symbols("A B C", commutative=False)
X, Y = symbols("X Y", commutative=False, real=True)
Z = X + I*Y
def test_complex():
assert Z.conjugate() == X - I*Y
assert (Z*Z.conjugate()).expand() == X**2 + Y**2 + I*Y*X - I*X*Y
def test_conjugate():
assert conjugate(A).is_commutative == False
assert (A*A).conjugate() == conjugate(A)**2
assert (A*B).conjugate() == conjugate(A)*conjugate(B)
assert (A*B**2).conjugate() == conjugate(A)*conjugate(B)**2
assert (A*B - B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
assert (A*B).conjugate() - (B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
def test_expand():
assert expand((A*B)**2) == A*B*A*B
assert expand(A*B - B*A) == A*B - B*A
assert expand((A*B/A)**2) == A*B*B/A
assert expand(B*A*(A + B)*B) == B*A**2*B + B*A*B**2
assert expand(B*A*(A + C)*B) == B*A**2*B + B*A*C*B
def test_factor():
assert factor(A*B - B*A) == A*B - B*A
def test_radsimp():
assert radsimp(A*B - B*A) == A*B - B*A
def test_simplify():
assert simplify(A*B - B*A) == A*B - B*A
def test_subs():
assert (x*y*A).subs(x*y, z) == A*z
assert (x*A*B).subs(x*A, C) == C*B
assert (x*A*x*x).subs(x**2*A, C) == x*C
assert (x*A*x*B).subs(x**2*A, C) == C*B
assert (A**2*B**2).subs(A*B**2, C) == A*C
assert (A*A*A + A*B*A).subs(A*A*A, C) == C + A*B*A
|
<commit_before><commit_msg>Add tests for noncommutative symbols.
- complex
- conjugate
- expand
- factor
- radsimp
- simplify
- subs<commit_after>"""Tests for noncommutative symbols and expressions."""
from sympy import (
conjugate,
expand,
factor,
radsimp,
simplify,
symbols,
I,
)
from sympy.abc import x, y, z
A, B, C = symbols("A B C", commutative=False)
X, Y = symbols("X Y", commutative=False, real=True)
Z = X + I*Y
def test_complex():
assert Z.conjugate() == X - I*Y
assert (Z*Z.conjugate()).expand() == X**2 + Y**2 + I*Y*X - I*X*Y
def test_conjugate():
assert conjugate(A).is_commutative == False
assert (A*A).conjugate() == conjugate(A)**2
assert (A*B).conjugate() == conjugate(A)*conjugate(B)
assert (A*B**2).conjugate() == conjugate(A)*conjugate(B)**2
assert (A*B - B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
assert (A*B).conjugate() - (B*A).conjugate() == conjugate(A)*conjugate(B) - conjugate(B)*conjugate(A)
def test_expand():
assert expand((A*B)**2) == A*B*A*B
assert expand(A*B - B*A) == A*B - B*A
assert expand((A*B/A)**2) == A*B*B/A
assert expand(B*A*(A + B)*B) == B*A**2*B + B*A*B**2
assert expand(B*A*(A + C)*B) == B*A**2*B + B*A*C*B
def test_factor():
assert factor(A*B - B*A) == A*B - B*A
def test_radsimp():
assert radsimp(A*B - B*A) == A*B - B*A
def test_simplify():
assert simplify(A*B - B*A) == A*B - B*A
def test_subs():
assert (x*y*A).subs(x*y, z) == A*z
assert (x*A*B).subs(x*A, C) == C*B
assert (x*A*x*x).subs(x**2*A, C) == x*C
assert (x*A*x*B).subs(x**2*A, C) == C*B
assert (A**2*B**2).subs(A*B**2, C) == A*C
assert (A*A*A + A*B*A).subs(A*A*A, C) == C + A*B*A
|
|
0e3bbd218c5617ac915a19bbfb2707793c0f907b
|
scripts/first_admin_levels.py
|
scripts/first_admin_levels.py
|
#!/usr/bin/env python
import csv
import json
states = set()
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
next(reader)
for row in reader:
state = row[0]
if not state:
continue
states.add(state.upper())
print json.dumps(list(states), indent=2, separators=(',', ': '))
|
Add first admin levels script
|
Add first admin levels script
Connects #573.
|
Python
|
agpl-3.0
|
eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django
|
Add first admin levels script
Connects #573.
|
#!/usr/bin/env python
import csv
import json
states = set()
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
next(reader)
for row in reader:
state = row[0]
if not state:
continue
states.add(state.upper())
print json.dumps(list(states), indent=2, separators=(',', ': '))
|
<commit_before><commit_msg>Add first admin levels script
Connects #573.<commit_after>
|
#!/usr/bin/env python
import csv
import json
states = set()
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
next(reader)
for row in reader:
state = row[0]
if not state:
continue
states.add(state.upper())
print json.dumps(list(states), indent=2, separators=(',', ': '))
|
Add first admin levels script
Connects #573.#!/usr/bin/env python
import csv
import json
states = set()
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
next(reader)
for row in reader:
state = row[0]
if not state:
continue
states.add(state.upper())
print json.dumps(list(states), indent=2, separators=(',', ': '))
|
<commit_before><commit_msg>Add first admin levels script
Connects #573.<commit_after>#!/usr/bin/env python
import csv
import json
states = set()
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
next(reader)
for row in reader:
state = row[0]
if not state:
continue
states.add(state.upper())
print json.dumps(list(states), indent=2, separators=(',', ': '))
|
|
1b92aa6fae1c2e7b9ad232bd69cf87d720fb57aa
|
integration_tests/test_null_sink.py
|
integration_tests/test_null_sink.py
|
#! /usr/bin/python
from nose.tools import assert_false
import os
import tables
import numpy as np
from tools import check_cmd
""" Tests """
def test_null_sink():
""" Testing for null sink case without a source facility """
#Cyclus simulation input for null sink testing
sim_input = "./inputs/null_sink.xml"
holdsrtn = [1] # needed because nose does not send() to test generator
cmd = ["cyclus", "-o", "./output_temp.h5", "--input-file", sim_input]
check_cmd(cmd, '.', holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don't execute further commands
output = tables.open_file("./output_temp.h5", mode = "r")
paths = ["/Transactions", "/TransactedResources"] # this must contain tables to test
#No resource exchange is expected
for path in paths:
yield assert_false, output.__contains__(path)
output.close()
os.remove("./output_temp.h5")
|
Add tests for null sink developed previously in cycamore repository
|
Add tests for null sink developed previously in cycamore repository
|
Python
|
bsd-3-clause
|
gidden/cyclus,rwcarlsen/cyclus,mbmcgarry/cyclus,gidden/cyclus,rwcarlsen/cyclus,rwcarlsen/cyclus,hodger/cyclus,mbmcgarry/cyclus,hodger/cyclus,mbmcgarry/cyclus,Baaaaam/cyclus,gidden/cyclus,gidden/cyclus,hodger/cyclus,Baaaaam/cyclus,mbmcgarry/cyclus,hodger/cyclus,hodger/cyclus,Baaaaam/cyclus,rwcarlsen/cyclus
|
Add tests for null sink developed previously in cycamore repository
|
#! /usr/bin/python
from nose.tools import assert_false
import os
import tables
import numpy as np
from tools import check_cmd
""" Tests """
def test_null_sink():
""" Testing for null sink case without a source facility """
#Cyclus simulation input for null sink testing
sim_input = "./inputs/null_sink.xml"
holdsrtn = [1] # needed because nose does not send() to test generator
cmd = ["cyclus", "-o", "./output_temp.h5", "--input-file", sim_input]
check_cmd(cmd, '.', holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don't execute further commands
output = tables.open_file("./output_temp.h5", mode = "r")
paths = ["/Transactions", "/TransactedResources"] # this must contain tables to test
#No resource exchange is expected
for path in paths:
yield assert_false, output.__contains__(path)
output.close()
os.remove("./output_temp.h5")
|
<commit_before><commit_msg>Add tests for null sink developed previously in cycamore repository<commit_after>
|
#! /usr/bin/python
from nose.tools import assert_false
import os
import tables
import numpy as np
from tools import check_cmd
""" Tests """
def test_null_sink():
""" Testing for null sink case without a source facility """
#Cyclus simulation input for null sink testing
sim_input = "./inputs/null_sink.xml"
holdsrtn = [1] # needed because nose does not send() to test generator
cmd = ["cyclus", "-o", "./output_temp.h5", "--input-file", sim_input]
check_cmd(cmd, '.', holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don't execute further commands
output = tables.open_file("./output_temp.h5", mode = "r")
paths = ["/Transactions", "/TransactedResources"] # this must contain tables to test
#No resource exchange is expected
for path in paths:
yield assert_false, output.__contains__(path)
output.close()
os.remove("./output_temp.h5")
|
Add tests for null sink developed previously in cycamore repository#! /usr/bin/python
from nose.tools import assert_false
import os
import tables
import numpy as np
from tools import check_cmd
""" Tests """
def test_null_sink():
""" Testing for null sink case without a source facility """
#Cyclus simulation input for null sink testing
sim_input = "./inputs/null_sink.xml"
holdsrtn = [1] # needed because nose does not send() to test generator
cmd = ["cyclus", "-o", "./output_temp.h5", "--input-file", sim_input]
check_cmd(cmd, '.', holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don't execute further commands
output = tables.open_file("./output_temp.h5", mode = "r")
paths = ["/Transactions", "/TransactedResources"] # this must contain tables to test
#No resource exchange is expected
for path in paths:
yield assert_false, output.__contains__(path)
output.close()
os.remove("./output_temp.h5")
|
<commit_before><commit_msg>Add tests for null sink developed previously in cycamore repository<commit_after>#! /usr/bin/python
from nose.tools import assert_false
import os
import tables
import numpy as np
from tools import check_cmd
""" Tests """
def test_null_sink():
""" Testing for null sink case without a source facility """
#Cyclus simulation input for null sink testing
sim_input = "./inputs/null_sink.xml"
holdsrtn = [1] # needed because nose does not send() to test generator
cmd = ["cyclus", "-o", "./output_temp.h5", "--input-file", sim_input]
check_cmd(cmd, '.', holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don't execute further commands
output = tables.open_file("./output_temp.h5", mode = "r")
paths = ["/Transactions", "/TransactedResources"] # this must contain tables to test
#No resource exchange is expected
for path in paths:
yield assert_false, output.__contains__(path)
output.close()
os.remove("./output_temp.h5")
|
|
1a2e53744422b674e1b96107e79b824174e3f642
|
pattern_recognition.py
|
pattern_recognition.py
|
def constant(diffs):
val = diffs.pop()
for d in diffs:
if d != val:
return False
return val
def pat1(seq): # consider two elements at a time
diffs = []
for i in xrange(1, len(seq)):
diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
return constant(diffs)
# representation of the pattern for pat1 was easy. how can we represent
# more complex patterns?
class Pattern(object):
(PAT_INT_ADD, PAT_INT_MULT, PAT_INT_POW) = range(3)
# TODO how does panda3d get constants?
def __init__(self, pat_type, pat_vals, prev_data, over=2, *args, **kwargs):
self.pat_type = pat_type
self.over = over
self.prev_data = prev_data
self.pat_vals = pat_vals
def next(self):
if self.pat_type == Pattern.PAT_INT_ADD:
tmp = self.prev_data[-1] + self.pat_vals[0] # TODO how much prev_data to keep?
self.prev_data.append(tmp)
return tmp
class PatternSeq(object):
def __init__(self, *args, **kwargs):
self.pattern = None
def have_pattern(self):
return self.pattern is not None
def infer(self, seq):
v = pat1(seq)
if v is not False:
self.pattern = Pattern(pat_type=Pattern.PAT_INT_ADD, pat_vals=[v], prev_data=seq) # TODO generalize
else:
raise Exception("NYI")
def extend(self, n):
if self.have_pattern():
x = []
for i in xrange(n):
x.append(self.pattern.next())
return x
else:
raise Exception("ALSDKJLASKJD")
# def pat2(seq): # consider three elements at a time
# diffs = []
# for i in xrange(1, len(seq)):
# diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
# val = constant(diffs)
# if val is False:
# print 'no pattern'
# else:
# print val
# TODO look at sympy interface, requests interface
# TODO detect pattern with certain number of anomalous values:
# e.g. 2,4,6,8,11
ps = PatternSeq()
ps.infer([2,4,6,8,10])
print "have pattern:", ps.have_pattern()
print "next 10 vals:", ps.extend(10)
|
Define a basic interface and successfully recognize +2 pattern
|
Define a basic interface and successfully recognize +2 pattern
|
Python
|
mit
|
bpetering/python-pattern-recognition
|
Define a basic interface and successfully recognize +2 pattern
|
def constant(diffs):
val = diffs.pop()
for d in diffs:
if d != val:
return False
return val
def pat1(seq): # consider two elements at a time
diffs = []
for i in xrange(1, len(seq)):
diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
return constant(diffs)
# representation of the pattern for pat1 was easy. how can we represent
# more complex patterns?
class Pattern(object):
(PAT_INT_ADD, PAT_INT_MULT, PAT_INT_POW) = range(3)
# TODO how does panda3d get constants?
def __init__(self, pat_type, pat_vals, prev_data, over=2, *args, **kwargs):
self.pat_type = pat_type
self.over = over
self.prev_data = prev_data
self.pat_vals = pat_vals
def next(self):
if self.pat_type == Pattern.PAT_INT_ADD:
tmp = self.prev_data[-1] + self.pat_vals[0] # TODO how much prev_data to keep?
self.prev_data.append(tmp)
return tmp
class PatternSeq(object):
def __init__(self, *args, **kwargs):
self.pattern = None
def have_pattern(self):
return self.pattern is not None
def infer(self, seq):
v = pat1(seq)
if v is not False:
self.pattern = Pattern(pat_type=Pattern.PAT_INT_ADD, pat_vals=[v], prev_data=seq) # TODO generalize
else:
raise Exception("NYI")
def extend(self, n):
if self.have_pattern():
x = []
for i in xrange(n):
x.append(self.pattern.next())
return x
else:
raise Exception("ALSDKJLASKJD")
# def pat2(seq): # consider three elements at a time
# diffs = []
# for i in xrange(1, len(seq)):
# diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
# val = constant(diffs)
# if val is False:
# print 'no pattern'
# else:
# print val
# TODO look at sympy interface, requests interface
# TODO detect pattern with certain number of anomalous values:
# e.g. 2,4,6,8,11
ps = PatternSeq()
ps.infer([2,4,6,8,10])
print "have pattern:", ps.have_pattern()
print "next 10 vals:", ps.extend(10)
|
<commit_before><commit_msg>Define a basic interface and successfully recognize +2 pattern<commit_after>
|
def constant(diffs):
val = diffs.pop()
for d in diffs:
if d != val:
return False
return val
def pat1(seq): # consider two elements at a time
diffs = []
for i in xrange(1, len(seq)):
diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
return constant(diffs)
# representation of the pattern for pat1 was easy. how can we represent
# more complex patterns?
class Pattern(object):
(PAT_INT_ADD, PAT_INT_MULT, PAT_INT_POW) = range(3)
# TODO how does panda3d get constants?
def __init__(self, pat_type, pat_vals, prev_data, over=2, *args, **kwargs):
self.pat_type = pat_type
self.over = over
self.prev_data = prev_data
self.pat_vals = pat_vals
def next(self):
if self.pat_type == Pattern.PAT_INT_ADD:
tmp = self.prev_data[-1] + self.pat_vals[0] # TODO how much prev_data to keep?
self.prev_data.append(tmp)
return tmp
class PatternSeq(object):
def __init__(self, *args, **kwargs):
self.pattern = None
def have_pattern(self):
return self.pattern is not None
def infer(self, seq):
v = pat1(seq)
if v is not False:
self.pattern = Pattern(pat_type=Pattern.PAT_INT_ADD, pat_vals=[v], prev_data=seq) # TODO generalize
else:
raise Exception("NYI")
def extend(self, n):
if self.have_pattern():
x = []
for i in xrange(n):
x.append(self.pattern.next())
return x
else:
raise Exception("ALSDKJLASKJD")
# def pat2(seq): # consider three elements at a time
# diffs = []
# for i in xrange(1, len(seq)):
# diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
# val = constant(diffs)
# if val is False:
# print 'no pattern'
# else:
# print val
# TODO look at sympy interface, requests interface
# TODO detect pattern with certain number of anomalous values:
# e.g. 2,4,6,8,11
ps = PatternSeq()
ps.infer([2,4,6,8,10])
print "have pattern:", ps.have_pattern()
print "next 10 vals:", ps.extend(10)
|
Define a basic interface and successfully recognize +2 patterndef constant(diffs):
val = diffs.pop()
for d in diffs:
if d != val:
return False
return val
def pat1(seq): # consider two elements at a time
diffs = []
for i in xrange(1, len(seq)):
diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
return constant(diffs)
# representation of the pattern for pat1 was easy. how can we represent
# more complex patterns?
class Pattern(object):
(PAT_INT_ADD, PAT_INT_MULT, PAT_INT_POW) = range(3)
# TODO how does panda3d get constants?
def __init__(self, pat_type, pat_vals, prev_data, over=2, *args, **kwargs):
self.pat_type = pat_type
self.over = over
self.prev_data = prev_data
self.pat_vals = pat_vals
def next(self):
if self.pat_type == Pattern.PAT_INT_ADD:
tmp = self.prev_data[-1] + self.pat_vals[0] # TODO how much prev_data to keep?
self.prev_data.append(tmp)
return tmp
class PatternSeq(object):
def __init__(self, *args, **kwargs):
self.pattern = None
def have_pattern(self):
return self.pattern is not None
def infer(self, seq):
v = pat1(seq)
if v is not False:
self.pattern = Pattern(pat_type=Pattern.PAT_INT_ADD, pat_vals=[v], prev_data=seq) # TODO generalize
else:
raise Exception("NYI")
def extend(self, n):
if self.have_pattern():
x = []
for i in xrange(n):
x.append(self.pattern.next())
return x
else:
raise Exception("ALSDKJLASKJD")
# def pat2(seq): # consider three elements at a time
# diffs = []
# for i in xrange(1, len(seq)):
# diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
# val = constant(diffs)
# if val is False:
# print 'no pattern'
# else:
# print val
# TODO look at sympy interface, requests interface
# TODO detect pattern with certain number of anomalous values:
# e.g. 2,4,6,8,11
ps = PatternSeq()
ps.infer([2,4,6,8,10])
print "have pattern:", ps.have_pattern()
print "next 10 vals:", ps.extend(10)
|
<commit_before><commit_msg>Define a basic interface and successfully recognize +2 pattern<commit_after>def constant(diffs):
val = diffs.pop()
for d in diffs:
if d != val:
return False
return val
def pat1(seq): # consider two elements at a time
diffs = []
for i in xrange(1, len(seq)):
diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
return constant(diffs)
# representation of the pattern for pat1 was easy. how can we represent
# more complex patterns?
class Pattern(object):
(PAT_INT_ADD, PAT_INT_MULT, PAT_INT_POW) = range(3)
# TODO how does panda3d get constants?
def __init__(self, pat_type, pat_vals, prev_data, over=2, *args, **kwargs):
self.pat_type = pat_type
self.over = over
self.prev_data = prev_data
self.pat_vals = pat_vals
def next(self):
if self.pat_type == Pattern.PAT_INT_ADD:
tmp = self.prev_data[-1] + self.pat_vals[0] # TODO how much prev_data to keep?
self.prev_data.append(tmp)
return tmp
class PatternSeq(object):
def __init__(self, *args, **kwargs):
self.pattern = None
def have_pattern(self):
return self.pattern is not None
def infer(self, seq):
v = pat1(seq)
if v is not False:
self.pattern = Pattern(pat_type=Pattern.PAT_INT_ADD, pat_vals=[v], prev_data=seq) # TODO generalize
else:
raise Exception("NYI")
def extend(self, n):
if self.have_pattern():
x = []
for i in xrange(n):
x.append(self.pattern.next())
return x
else:
raise Exception("ALSDKJLASKJD")
# def pat2(seq): # consider three elements at a time
# diffs = []
# for i in xrange(1, len(seq)):
# diffs.append( seq[i] - seq[i-1] ) # implicit directionality - factor out
# val = constant(diffs)
# if val is False:
# print 'no pattern'
# else:
# print val
# TODO look at sympy interface, requests interface
# TODO detect pattern with certain number of anomalous values:
# e.g. 2,4,6,8,11
ps = PatternSeq()
ps.infer([2,4,6,8,10])
print "have pattern:", ps.have_pattern()
print "next 10 vals:", ps.extend(10)
|
|
a482c3e938db1d7fc15d0c28da0fbd4f230803af
|
lab_mip.py
|
lab_mip.py
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pulp import *
import sys
x = []
y = []
thismodule = sys.modules[__name__]
for i in range(6):
name = 'x{0}'.format(i + 1)
var = LpVariable(name, 0, None)
setattr(thismodule, name, var)
x.append(var)
for i in range(6):
name = 'y{0}'.format(i + 1)
var = LpVariable(name, 0, 1, 'Binary')
setattr(thismodule, name, var)
y.append(var)
# defines the problem
prob = LpProblem("problem", LpMaximize)
# defines the objective function to maximize
prob += x1 + x2 + x3 + x4 + x5 + x6, 'Sum of spaces'
# defines the constraints
prob += x1 + x2 <= 100, 'First disk'
prob += x3 + x4 <= 100, 'Second disk'
prob += x5 + x6 <= 100, 'Third disk'
prob += y1 + y3 + y5 == 2, 'Replication factor'
prob += x1 + x3 + x5 >= 20, 'First min size'
prob += x2 + x4 + x6 >= 101, 'Second min size'
prob += x1 - x5 == 0, 'Sizes equality for raid'
# Convert from Float to Integer
for i, x_ in enumerate(x):
y_ = y[i]
prob += y_ - x_ <= 0
prob += x_ - 100 * y_ <= 0
# solve the problem
status = prob.solve(GLPK(msg=1))
def print_vector(vector, prefix, n=2):
for i, v in enumerate(vector):
sys.stdout.write('{0}_{1} = {2}'.format(prefix, i + 1, value(v)))
if (i + 1) % n:
sys.stdout.write('\t')
else:
sys.stdout.write('\n')
print_vector(x, 'x')
print_vector(y, 'y')
|
Add file with usage of Mixed Integer Programming with Pulp
|
Add file with usage of Mixed Integer Programming with Pulp
|
Python
|
apache-2.0
|
rustyrobot/bareon-dynamic-allocator,rustyrobot/bareon-allocator,rustyrobot/bareon-allocator,rustyrobot/bareon-dynamic-allocator
|
Add file with usage of Mixed Integer Programming with Pulp
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pulp import *
import sys
x = []
y = []
thismodule = sys.modules[__name__]
for i in range(6):
name = 'x{0}'.format(i + 1)
var = LpVariable(name, 0, None)
setattr(thismodule, name, var)
x.append(var)
for i in range(6):
name = 'y{0}'.format(i + 1)
var = LpVariable(name, 0, 1, 'Binary')
setattr(thismodule, name, var)
y.append(var)
# defines the problem
prob = LpProblem("problem", LpMaximize)
# defines the objective function to maximize
prob += x1 + x2 + x3 + x4 + x5 + x6, 'Sum of spaces'
# defines the constraints
prob += x1 + x2 <= 100, 'First disk'
prob += x3 + x4 <= 100, 'Second disk'
prob += x5 + x6 <= 100, 'Third disk'
prob += y1 + y3 + y5 == 2, 'Replication factor'
prob += x1 + x3 + x5 >= 20, 'First min size'
prob += x2 + x4 + x6 >= 101, 'Second min size'
prob += x1 - x5 == 0, 'Sizes equality for raid'
# Convert from Float to Integer
for i, x_ in enumerate(x):
y_ = y[i]
prob += y_ - x_ <= 0
prob += x_ - 100 * y_ <= 0
# solve the problem
status = prob.solve(GLPK(msg=1))
def print_vector(vector, prefix, n=2):
for i, v in enumerate(vector):
sys.stdout.write('{0}_{1} = {2}'.format(prefix, i + 1, value(v)))
if (i + 1) % n:
sys.stdout.write('\t')
else:
sys.stdout.write('\n')
print_vector(x, 'x')
print_vector(y, 'y')
|
<commit_before><commit_msg>Add file with usage of Mixed Integer Programming with Pulp<commit_after>
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pulp import *
import sys
x = []
y = []
thismodule = sys.modules[__name__]
for i in range(6):
name = 'x{0}'.format(i + 1)
var = LpVariable(name, 0, None)
setattr(thismodule, name, var)
x.append(var)
for i in range(6):
name = 'y{0}'.format(i + 1)
var = LpVariable(name, 0, 1, 'Binary')
setattr(thismodule, name, var)
y.append(var)
# defines the problem
prob = LpProblem("problem", LpMaximize)
# defines the objective function to maximize
prob += x1 + x2 + x3 + x4 + x5 + x6, 'Sum of spaces'
# defines the constraints
prob += x1 + x2 <= 100, 'First disk'
prob += x3 + x4 <= 100, 'Second disk'
prob += x5 + x6 <= 100, 'Third disk'
prob += y1 + y3 + y5 == 2, 'Replication factor'
prob += x1 + x3 + x5 >= 20, 'First min size'
prob += x2 + x4 + x6 >= 101, 'Second min size'
prob += x1 - x5 == 0, 'Sizes equality for raid'
# Convert from Float to Integer
for i, x_ in enumerate(x):
y_ = y[i]
prob += y_ - x_ <= 0
prob += x_ - 100 * y_ <= 0
# solve the problem
status = prob.solve(GLPK(msg=1))
def print_vector(vector, prefix, n=2):
for i, v in enumerate(vector):
sys.stdout.write('{0}_{1} = {2}'.format(prefix, i + 1, value(v)))
if (i + 1) % n:
sys.stdout.write('\t')
else:
sys.stdout.write('\n')
print_vector(x, 'x')
print_vector(y, 'y')
|
Add file with usage of Mixed Integer Programming with Pulp# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pulp import *
import sys
x = []
y = []
thismodule = sys.modules[__name__]
for i in range(6):
name = 'x{0}'.format(i + 1)
var = LpVariable(name, 0, None)
setattr(thismodule, name, var)
x.append(var)
for i in range(6):
name = 'y{0}'.format(i + 1)
var = LpVariable(name, 0, 1, 'Binary')
setattr(thismodule, name, var)
y.append(var)
# defines the problem
prob = LpProblem("problem", LpMaximize)
# defines the objective function to maximize
prob += x1 + x2 + x3 + x4 + x5 + x6, 'Sum of spaces'
# defines the constraints
prob += x1 + x2 <= 100, 'First disk'
prob += x3 + x4 <= 100, 'Second disk'
prob += x5 + x6 <= 100, 'Third disk'
prob += y1 + y3 + y5 == 2, 'Replication factor'
prob += x1 + x3 + x5 >= 20, 'First min size'
prob += x2 + x4 + x6 >= 101, 'Second min size'
prob += x1 - x5 == 0, 'Sizes equality for raid'
# Convert from Float to Integer
for i, x_ in enumerate(x):
y_ = y[i]
prob += y_ - x_ <= 0
prob += x_ - 100 * y_ <= 0
# solve the problem
status = prob.solve(GLPK(msg=1))
def print_vector(vector, prefix, n=2):
for i, v in enumerate(vector):
sys.stdout.write('{0}_{1} = {2}'.format(prefix, i + 1, value(v)))
if (i + 1) % n:
sys.stdout.write('\t')
else:
sys.stdout.write('\n')
print_vector(x, 'x')
print_vector(y, 'y')
|
<commit_before><commit_msg>Add file with usage of Mixed Integer Programming with Pulp<commit_after># Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pulp import *
import sys
x = []
y = []
thismodule = sys.modules[__name__]
for i in range(6):
name = 'x{0}'.format(i + 1)
var = LpVariable(name, 0, None)
setattr(thismodule, name, var)
x.append(var)
for i in range(6):
name = 'y{0}'.format(i + 1)
var = LpVariable(name, 0, 1, 'Binary')
setattr(thismodule, name, var)
y.append(var)
# defines the problem
prob = LpProblem("problem", LpMaximize)
# defines the objective function to maximize
prob += x1 + x2 + x3 + x4 + x5 + x6, 'Sum of spaces'
# defines the constraints
prob += x1 + x2 <= 100, 'First disk'
prob += x3 + x4 <= 100, 'Second disk'
prob += x5 + x6 <= 100, 'Third disk'
prob += y1 + y3 + y5 == 2, 'Replication factor'
prob += x1 + x3 + x5 >= 20, 'First min size'
prob += x2 + x4 + x6 >= 101, 'Second min size'
prob += x1 - x5 == 0, 'Sizes equality for raid'
# Convert from Float to Integer
for i, x_ in enumerate(x):
y_ = y[i]
prob += y_ - x_ <= 0
prob += x_ - 100 * y_ <= 0
# solve the problem
status = prob.solve(GLPK(msg=1))
def print_vector(vector, prefix, n=2):
for i, v in enumerate(vector):
sys.stdout.write('{0}_{1} = {2}'.format(prefix, i + 1, value(v)))
if (i + 1) % n:
sys.stdout.write('\t')
else:
sys.stdout.write('\n')
print_vector(x, 'x')
print_vector(y, 'y')
|
|
a969f342137485dbb6f212bc1aa320770aac1421
|
cub200_2011_dataset.py
|
cub200_2011_dataset.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 11 00:57:05 2017
@author: sakurai
"""
from fuel.datasets import H5PYDataset
from fuel.utils import find_in_data_path
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
class Cub200_2011Dataset(H5PYDataset):
_filename = 'cub200_2011/cub200_2011.hdf5'
def __init__(self, which_sets, **kwargs):
super(Cub200_2011Dataset, self).__init__(
file_or_path=find_in_data_path(self._filename),
which_sets=which_sets, **kwargs)
def load_as_ndarray(which_sets=['train', 'test']):
datasets = []
for split in which_sets:
data = Cub200_2011Dataset([split], load_in_memory=True).data_sources
datasets.append(data)
return datasets
if __name__ == '__main__':
dataset = Cub200_2011Dataset(['train'])
st = DataStream(
dataset, iteration_scheme=SequentialScheme(dataset.num_examples, 1))
it = st.get_epoch_iterator()
it.next()
|
Add fuel dataset for CUB200_2011
|
Add fuel dataset for CUB200_2011
|
Python
|
mit
|
ronekko/deep_metric_learning
|
Add fuel dataset for CUB200_2011
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 11 00:57:05 2017
@author: sakurai
"""
from fuel.datasets import H5PYDataset
from fuel.utils import find_in_data_path
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
class Cub200_2011Dataset(H5PYDataset):
_filename = 'cub200_2011/cub200_2011.hdf5'
def __init__(self, which_sets, **kwargs):
super(Cub200_2011Dataset, self).__init__(
file_or_path=find_in_data_path(self._filename),
which_sets=which_sets, **kwargs)
def load_as_ndarray(which_sets=['train', 'test']):
datasets = []
for split in which_sets:
data = Cub200_2011Dataset([split], load_in_memory=True).data_sources
datasets.append(data)
return datasets
if __name__ == '__main__':
dataset = Cub200_2011Dataset(['train'])
st = DataStream(
dataset, iteration_scheme=SequentialScheme(dataset.num_examples, 1))
it = st.get_epoch_iterator()
it.next()
|
<commit_before><commit_msg>Add fuel dataset for CUB200_2011<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 11 00:57:05 2017
@author: sakurai
"""
from fuel.datasets import H5PYDataset
from fuel.utils import find_in_data_path
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
class Cub200_2011Dataset(H5PYDataset):
_filename = 'cub200_2011/cub200_2011.hdf5'
def __init__(self, which_sets, **kwargs):
super(Cub200_2011Dataset, self).__init__(
file_or_path=find_in_data_path(self._filename),
which_sets=which_sets, **kwargs)
def load_as_ndarray(which_sets=['train', 'test']):
datasets = []
for split in which_sets:
data = Cub200_2011Dataset([split], load_in_memory=True).data_sources
datasets.append(data)
return datasets
if __name__ == '__main__':
dataset = Cub200_2011Dataset(['train'])
st = DataStream(
dataset, iteration_scheme=SequentialScheme(dataset.num_examples, 1))
it = st.get_epoch_iterator()
it.next()
|
Add fuel dataset for CUB200_2011# -*- coding: utf-8 -*-
"""
Created on Sat Feb 11 00:57:05 2017
@author: sakurai
"""
from fuel.datasets import H5PYDataset
from fuel.utils import find_in_data_path
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
class Cub200_2011Dataset(H5PYDataset):
_filename = 'cub200_2011/cub200_2011.hdf5'
def __init__(self, which_sets, **kwargs):
super(Cub200_2011Dataset, self).__init__(
file_or_path=find_in_data_path(self._filename),
which_sets=which_sets, **kwargs)
def load_as_ndarray(which_sets=['train', 'test']):
datasets = []
for split in which_sets:
data = Cub200_2011Dataset([split], load_in_memory=True).data_sources
datasets.append(data)
return datasets
if __name__ == '__main__':
dataset = Cub200_2011Dataset(['train'])
st = DataStream(
dataset, iteration_scheme=SequentialScheme(dataset.num_examples, 1))
it = st.get_epoch_iterator()
it.next()
|
<commit_before><commit_msg>Add fuel dataset for CUB200_2011<commit_after># -*- coding: utf-8 -*-
"""
Created on Sat Feb 11 00:57:05 2017
@author: sakurai
"""
from fuel.datasets import H5PYDataset
from fuel.utils import find_in_data_path
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
class Cub200_2011Dataset(H5PYDataset):
_filename = 'cub200_2011/cub200_2011.hdf5'
def __init__(self, which_sets, **kwargs):
super(Cub200_2011Dataset, self).__init__(
file_or_path=find_in_data_path(self._filename),
which_sets=which_sets, **kwargs)
def load_as_ndarray(which_sets=['train', 'test']):
datasets = []
for split in which_sets:
data = Cub200_2011Dataset([split], load_in_memory=True).data_sources
datasets.append(data)
return datasets
if __name__ == '__main__':
dataset = Cub200_2011Dataset(['train'])
st = DataStream(
dataset, iteration_scheme=SequentialScheme(dataset.num_examples, 1))
it = st.get_epoch_iterator()
it.next()
|
|
627852019b915f9d34f7d036c90f6a1d8307df53
|
tests/basics/builtin_compile.py
|
tests/basics/builtin_compile.py
|
# test compile builtin
try:
compile
except NameError:
print("SKIP")
import sys
sys.exit()
c = compile("print(x)", "file", "exec")
try:
exec(c)
except NameError:
print("NameError")
x = 1
exec(c)
exec(c, {"x":2})
exec(c, {}, {"x":3})
|
Add test for compile builtin.
|
tests: Add test for compile builtin.
|
Python
|
mit
|
tralamazza/micropython,SungEun-Steve-Kim/test-mp,swegener/micropython,pfalcon/micropython,turbinenreiter/micropython,ceramos/micropython,ericsnowcurrently/micropython,cwyark/micropython,henriknelson/micropython,torwag/micropython,AriZuu/micropython,tobbad/micropython,Timmenem/micropython,KISSMonX/micropython,blazewicz/micropython,lbattraw/micropython,dinau/micropython,vriera/micropython,slzatz/micropython,adafruit/micropython,dhylands/micropython,martinribelotta/micropython,suda/micropython,ganshun666/micropython,bvernoux/micropython,matthewelse/micropython,Timmenem/micropython,blmorris/micropython,neilh10/micropython,xhat/micropython,SungEun-Steve-Kim/test-mp,noahwilliamsson/micropython,lowRISC/micropython,xyb/micropython,ruffy91/micropython,supergis/micropython,jlillest/micropython,warner83/micropython,matthewelse/micropython,bvernoux/micropython,matthewelse/micropython,mpalomer/micropython,henriknelson/micropython,vitiral/micropython,praemdonck/micropython,ryannathans/micropython,neilh10/micropython,lbattraw/micropython,swegener/micropython,firstval/micropython,danicampora/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,xhat/micropython,matthewelse/micropython,ruffy91/micropython,mpalomer/micropython,mianos/micropython,AriZuu/micropython,tuc-osg/micropython,ceramos/micropython,xuxiaoxin/micropython,cwyark/micropython,suda/micropython,blmorris/micropython,adafruit/circuitpython,stonegithubs/micropython,heisewangluo/micropython,infinnovation/micropython,mgyenik/micropython,warner83/micropython,dhylands/micropython,ceramos/micropython,xuxiaoxin/micropython,trezor/micropython,deshipu/micropython,galenhz/micropython,danicampora/micropython,toolmacher/micropython,adamkh/micropython,aethaniel/micropython,selste/micropython,MrSurly/micropython-esp32,xyb/micropython,ernesto-g/micropython,danicampora/micropython,selste/micropython,skybird6672/micropython,pfalcon/micropython,jimkmc/micropython,mgyenik/micropython,tralamazza/micropython,trezor/micropython,infinnovation/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,TDAbboud/micropython,drrk/micropython,lbattraw/micropython,tobbad/micropython,mpalomer/micropython,hosaka/micropython,alex-march/micropython,noahchense/micropython,adafruit/circuitpython,Peetz0r/micropython-esp32,MrSurly/micropython,turbinenreiter/micropython,cnoviello/micropython,blazewicz/micropython,adamkh/micropython,turbinenreiter/micropython,tdautc19841202/micropython,deshipu/micropython,SungEun-Steve-Kim/test-mp,MrSurly/micropython,tdautc19841202/micropython,MrSurly/micropython-esp32,martinribelotta/micropython,aethaniel/micropython,SungEun-Steve-Kim/test-mp,xhat/micropython,misterdanb/micropython,cloudformdesign/micropython,Vogtinator/micropython,heisewangluo/micropython,alex-robbins/micropython,misterdanb/micropython,vriera/micropython,ahotam/micropython,dinau/micropython,blazewicz/micropython,suda/micropython,cloudformdesign/micropython,supergis/micropython,alex-robbins/micropython,MrSurly/micropython,adamkh/micropython,matthewelse/micropython,mhoffma/micropython,ryannathans/micropython,KISSMonX/micropython,slzatz/micropython,cloudformdesign/micropython,vitiral/micropython,praemdonck/micropython,redbear/micropython,selste/micropython,xhat/micropython,Peetz0r/micropython-esp32,noahchense/micropython,dxxb/micropython,aethaniel/micropython,kerneltask/micropython,cwyark/micropython,skybird6672/micropython,martinribelotta/micropython,kostyll/micropython,cnoviello/micropython,noahwilliamsson/micropython,feilongfl/micropython,noahchense/micropython,deshipu/micropython,ericsnowcurrently/micropython,kerneltask/micropython,drrk/micropython,Vogtinator/micropython,toolmacher/micropython,noahwilliamsson/micropython,mhoffma/micropython,AriZuu/micropython,henriknelson/micropython,puuu/micropython,toolmacher/micropython,MrSurly/micropython-esp32,emfcamp/micropython,noahchense/micropython,mhoffma/micropython,emfcamp/micropython,hiway/micropython,deshipu/micropython,ganshun666/micropython,vitiral/micropython,blazewicz/micropython,tobbad/micropython,kostyll/micropython,deshipu/micropython,chrisdearman/micropython,ruffy91/micropython,praemdonck/micropython,aethaniel/micropython,cloudformdesign/micropython,dhylands/micropython,swegener/micropython,adafruit/circuitpython,slzatz/micropython,puuu/micropython,utopiaprince/micropython,hiway/micropython,dxxb/micropython,emfcamp/micropython,ceramos/micropython,ganshun666/micropython,adamkh/micropython,rubencabrera/micropython,praemdonck/micropython,hosaka/micropython,pramasoul/micropython,redbear/micropython,suda/micropython,EcmaXp/micropython,jlillest/micropython,misterdanb/micropython,utopiaprince/micropython,hosaka/micropython,kerneltask/micropython,redbear/micropython,jmarcelino/pycom-micropython,HenrikSolver/micropython,oopy/micropython,tobbad/micropython,neilh10/micropython,heisewangluo/micropython,jmarcelino/pycom-micropython,Vogtinator/micropython,mgyenik/micropython,henriknelson/micropython,ernesto-g/micropython,skybird6672/micropython,cnoviello/micropython,ceramos/micropython,martinribelotta/micropython,kerneltask/micropython,PappaPeppar/micropython,pozetroninc/micropython,swegener/micropython,KISSMonX/micropython,lowRISC/micropython,xuxiaoxin/micropython,omtinez/micropython,ChuckM/micropython,blazewicz/micropython,SHA2017-badge/micropython-esp32,pfalcon/micropython,ChuckM/micropython,xuxiaoxin/micropython,tuc-osg/micropython,rubencabrera/micropython,omtinez/micropython,mhoffma/micropython,emfcamp/micropython,tdautc19841202/micropython,chrisdearman/micropython,TDAbboud/micropython,SHA2017-badge/micropython-esp32,jimkmc/micropython,lbattraw/micropython,pramasoul/micropython,drrk/micropython,emfcamp/micropython,micropython/micropython-esp32,kostyll/micropython,warner83/micropython,feilongfl/micropython,puuu/micropython,puuu/micropython,mianos/micropython,dhylands/micropython,blmorris/micropython,chrisdearman/micropython,bvernoux/micropython,xuxiaoxin/micropython,slzatz/micropython,micropython/micropython-esp32,trezor/micropython,ryannathans/micropython,hosaka/micropython,trezor/micropython,neilh10/micropython,xyb/micropython,infinnovation/micropython,ganshun666/micropython,ernesto-g/micropython,adafruit/micropython,hiway/micropython,martinribelotta/micropython,oopy/micropython,alex-march/micropython,praemdonck/micropython,neilh10/micropython,dxxb/micropython,dxxb/micropython,dinau/micropython,skybird6672/micropython,AriZuu/micropython,tuc-osg/micropython,rubencabrera/micropython,oopy/micropython,alex-robbins/micropython,dhylands/micropython,kerneltask/micropython,cnoviello/micropython,tuc-osg/micropython,xyb/micropython,tralamazza/micropython,rubencabrera/micropython,vriera/micropython,pfalcon/micropython,AriZuu/micropython,EcmaXp/micropython,swegener/micropython,supergis/micropython,ahotam/micropython,Peetz0r/micropython-esp32,jimkmc/micropython,selste/micropython,EcmaXp/micropython,trezor/micropython,dinau/micropython,pozetroninc/micropython,Peetz0r/micropython-esp32,EcmaXp/micropython,feilongfl/micropython,HenrikSolver/micropython,kostyll/micropython,adafruit/circuitpython,tralamazza/micropython,Timmenem/micropython,MrSurly/micropython,SungEun-Steve-Kim/test-mp,PappaPeppar/micropython,mgyenik/micropython,ahotam/micropython,cnoviello/micropython,jlillest/micropython,hosaka/micropython,ericsnowcurrently/micropython,infinnovation/micropython,paul-xxx/micropython,KISSMonX/micropython,dmazzella/micropython,orionrobots/micropython,mianos/micropython,adafruit/micropython,noahwilliamsson/micropython,adamkh/micropython,ryannathans/micropython,tobbad/micropython,alex-robbins/micropython,paul-xxx/micropython,galenhz/micropython,ganshun666/micropython,mpalomer/micropython,torwag/micropython,stonegithubs/micropython,HenrikSolver/micropython,misterdanb/micropython,supergis/micropython,SHA2017-badge/micropython-esp32,aethaniel/micropython,ahotam/micropython,feilongfl/micropython,pozetroninc/micropython,TDAbboud/micropython,oopy/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,ruffy91/micropython,cwyark/micropython,firstval/micropython,utopiaprince/micropython,ryannathans/micropython,mianos/micropython,toolmacher/micropython,lowRISC/micropython,paul-xxx/micropython,paul-xxx/micropython,HenrikSolver/micropython,turbinenreiter/micropython,lbattraw/micropython,alex-march/micropython,ernesto-g/micropython,redbear/micropython,galenhz/micropython,firstval/micropython,ruffy91/micropython,ChuckM/micropython,jimkmc/micropython,heisewangluo/micropython,torwag/micropython,bvernoux/micropython,chrisdearman/micropython,pramasoul/micropython,omtinez/micropython,vitiral/micropython,galenhz/micropython,hiway/micropython,torwag/micropython,redbear/micropython,chrisdearman/micropython,Vogtinator/micropython,firstval/micropython,torwag/micropython,lowRISC/micropython,vitiral/micropython,feilongfl/micropython,rubencabrera/micropython,stonegithubs/micropython,selste/micropython,misterdanb/micropython,PappaPeppar/micropython,tdautc19841202/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,toolmacher/micropython,KISSMonX/micropython,xhat/micropython,turbinenreiter/micropython,ChuckM/micropython,alex-march/micropython,ernesto-g/micropython,puuu/micropython,Timmenem/micropython,jimkmc/micropython,adafruit/micropython,dmazzella/micropython,MrSurly/micropython-esp32,HenrikSolver/micropython,henriknelson/micropython,oopy/micropython,tdautc19841202/micropython,adafruit/circuitpython,dmazzella/micropython,danicampora/micropython,xyb/micropython,jlillest/micropython,infinnovation/micropython,mpalomer/micropython,orionrobots/micropython,Vogtinator/micropython,stonegithubs/micropython,blmorris/micropython,orionrobots/micropython,TDAbboud/micropython,pozetroninc/micropython,danicampora/micropython,pfalcon/micropython,utopiaprince/micropython,dmazzella/micropython,vriera/micropython,tuc-osg/micropython,alex-march/micropython,hiway/micropython,matthewelse/micr
opython,warner83/micropython,warner83/micropython,ChuckM/micropython,ericsnowcurrently/micropython,cloudformdesign/micropython,dxxb/micropython,drrk/micropython,jlillest/micropython,kostyll/micropython,blmorris/micropython,drrk/micropython,micropython/micropython-esp32,jmarcelino/pycom-micropython,micropython/micropython-esp32,MrSurly/micropython,stonegithubs/micropython,dinau/micropython
|
tests: Add test for compile builtin.
|
# test compile builtin
try:
compile
except NameError:
print("SKIP")
import sys
sys.exit()
c = compile("print(x)", "file", "exec")
try:
exec(c)
except NameError:
print("NameError")
x = 1
exec(c)
exec(c, {"x":2})
exec(c, {}, {"x":3})
|
<commit_before><commit_msg>tests: Add test for compile builtin.<commit_after>
|
# test compile builtin
try:
compile
except NameError:
print("SKIP")
import sys
sys.exit()
c = compile("print(x)", "file", "exec")
try:
exec(c)
except NameError:
print("NameError")
x = 1
exec(c)
exec(c, {"x":2})
exec(c, {}, {"x":3})
|
tests: Add test for compile builtin.# test compile builtin
try:
compile
except NameError:
print("SKIP")
import sys
sys.exit()
c = compile("print(x)", "file", "exec")
try:
exec(c)
except NameError:
print("NameError")
x = 1
exec(c)
exec(c, {"x":2})
exec(c, {}, {"x":3})
|
<commit_before><commit_msg>tests: Add test for compile builtin.<commit_after># test compile builtin
try:
compile
except NameError:
print("SKIP")
import sys
sys.exit()
c = compile("print(x)", "file", "exec")
try:
exec(c)
except NameError:
print("NameError")
x = 1
exec(c)
exec(c, {"x":2})
exec(c, {}, {"x":3})
|
|
667fb50a3bccaa3f47fc21ec83c1ba2543605c08
|
tests/unit_tests/test_source_file.py
|
tests/unit_tests/test_source_file.py
|
from random import random
import h5py
import numpy as np
import openmc
def test_source_file(run_in_tmpdir):
# Create source particles
source = []
n = 1000
for i in range(n):
source.append(openmc.SourceParticle(
r=(random(), i, 0),
u=(0., 0., 1.),
E=float(n - i),
))
# Create source file
openmc.write_source_file(source, 'test_source.h5')
# Get array of source particles from file
with h5py.File('test_source.h5', 'r') as fh:
arr = fh['source_bank'][...]
# Ensure data is consistent
r = arr['r']
assert np.all((r['x'] > 0.0) & (r['x'] < 1.0))
assert np.all(r['y'] == np.arange(1000))
assert np.all(r['z'] == 0.0)
u = arr['u']
assert np.all(u['x'] == 0.0)
assert np.all(u['y'] == 0.0)
assert np.all(u['z'] == 1.0)
assert np.all(arr['E'] == n - np.arange(n))
assert np.all(arr['wgt'] == 1.0)
assert np.all(arr['delayed_group'] == 0)
assert np.all(arr['particle'] == 0)
|
Add test for source file generation
|
Add test for source file generation
|
Python
|
mit
|
amandalund/openmc,mit-crpg/openmc,paulromano/openmc,paulromano/openmc,walshjon/openmc,shikhar413/openmc,mit-crpg/openmc,amandalund/openmc,mit-crpg/openmc,shikhar413/openmc,paulromano/openmc,walshjon/openmc,shikhar413/openmc,paulromano/openmc,shikhar413/openmc,amandalund/openmc,mit-crpg/openmc,walshjon/openmc,amandalund/openmc,walshjon/openmc
|
Add test for source file generation
|
from random import random
import h5py
import numpy as np
import openmc
def test_source_file(run_in_tmpdir):
# Create source particles
source = []
n = 1000
for i in range(n):
source.append(openmc.SourceParticle(
r=(random(), i, 0),
u=(0., 0., 1.),
E=float(n - i),
))
# Create source file
openmc.write_source_file(source, 'test_source.h5')
# Get array of source particles from file
with h5py.File('test_source.h5', 'r') as fh:
arr = fh['source_bank'][...]
# Ensure data is consistent
r = arr['r']
assert np.all((r['x'] > 0.0) & (r['x'] < 1.0))
assert np.all(r['y'] == np.arange(1000))
assert np.all(r['z'] == 0.0)
u = arr['u']
assert np.all(u['x'] == 0.0)
assert np.all(u['y'] == 0.0)
assert np.all(u['z'] == 1.0)
assert np.all(arr['E'] == n - np.arange(n))
assert np.all(arr['wgt'] == 1.0)
assert np.all(arr['delayed_group'] == 0)
assert np.all(arr['particle'] == 0)
|
<commit_before><commit_msg>Add test for source file generation<commit_after>
|
from random import random
import h5py
import numpy as np
import openmc
def test_source_file(run_in_tmpdir):
# Create source particles
source = []
n = 1000
for i in range(n):
source.append(openmc.SourceParticle(
r=(random(), i, 0),
u=(0., 0., 1.),
E=float(n - i),
))
# Create source file
openmc.write_source_file(source, 'test_source.h5')
# Get array of source particles from file
with h5py.File('test_source.h5', 'r') as fh:
arr = fh['source_bank'][...]
# Ensure data is consistent
r = arr['r']
assert np.all((r['x'] > 0.0) & (r['x'] < 1.0))
assert np.all(r['y'] == np.arange(1000))
assert np.all(r['z'] == 0.0)
u = arr['u']
assert np.all(u['x'] == 0.0)
assert np.all(u['y'] == 0.0)
assert np.all(u['z'] == 1.0)
assert np.all(arr['E'] == n - np.arange(n))
assert np.all(arr['wgt'] == 1.0)
assert np.all(arr['delayed_group'] == 0)
assert np.all(arr['particle'] == 0)
|
Add test for source file generationfrom random import random
import h5py
import numpy as np
import openmc
def test_source_file(run_in_tmpdir):
# Create source particles
source = []
n = 1000
for i in range(n):
source.append(openmc.SourceParticle(
r=(random(), i, 0),
u=(0., 0., 1.),
E=float(n - i),
))
# Create source file
openmc.write_source_file(source, 'test_source.h5')
# Get array of source particles from file
with h5py.File('test_source.h5', 'r') as fh:
arr = fh['source_bank'][...]
# Ensure data is consistent
r = arr['r']
assert np.all((r['x'] > 0.0) & (r['x'] < 1.0))
assert np.all(r['y'] == np.arange(1000))
assert np.all(r['z'] == 0.0)
u = arr['u']
assert np.all(u['x'] == 0.0)
assert np.all(u['y'] == 0.0)
assert np.all(u['z'] == 1.0)
assert np.all(arr['E'] == n - np.arange(n))
assert np.all(arr['wgt'] == 1.0)
assert np.all(arr['delayed_group'] == 0)
assert np.all(arr['particle'] == 0)
|
<commit_before><commit_msg>Add test for source file generation<commit_after>from random import random
import h5py
import numpy as np
import openmc
def test_source_file(run_in_tmpdir):
# Create source particles
source = []
n = 1000
for i in range(n):
source.append(openmc.SourceParticle(
r=(random(), i, 0),
u=(0., 0., 1.),
E=float(n - i),
))
# Create source file
openmc.write_source_file(source, 'test_source.h5')
# Get array of source particles from file
with h5py.File('test_source.h5', 'r') as fh:
arr = fh['source_bank'][...]
# Ensure data is consistent
r = arr['r']
assert np.all((r['x'] > 0.0) & (r['x'] < 1.0))
assert np.all(r['y'] == np.arange(1000))
assert np.all(r['z'] == 0.0)
u = arr['u']
assert np.all(u['x'] == 0.0)
assert np.all(u['y'] == 0.0)
assert np.all(u['z'] == 1.0)
assert np.all(arr['E'] == n - np.arange(n))
assert np.all(arr['wgt'] == 1.0)
assert np.all(arr['delayed_group'] == 0)
assert np.all(arr['particle'] == 0)
|
|
0cc7ef081380a644b04e6150dcab02e8731d1800
|
doc/deployer/test_export_epub.py
|
doc/deployer/test_export_epub.py
|
from gnowsys_ndf.ndf.models import *
from gnowsys_ndf.ndf.views.export_to_epub import *
n = node_collection.one({'_id': ObjectId('5752b5392e01310424dcf6cc')})
create_epub(n.name, n.collection_dict)
|
Test export epub script added
|
Test export epub script added
|
Python
|
agpl-3.0
|
gnowledge/gstudio,gnowledge/gstudio,gnowledge/gstudio,gnowledge/gstudio,gnowledge/gstudio
|
Test export epub script added
|
from gnowsys_ndf.ndf.models import *
from gnowsys_ndf.ndf.views.export_to_epub import *
n = node_collection.one({'_id': ObjectId('5752b5392e01310424dcf6cc')})
create_epub(n.name, n.collection_dict)
|
<commit_before><commit_msg>Test export epub script added<commit_after>
|
from gnowsys_ndf.ndf.models import *
from gnowsys_ndf.ndf.views.export_to_epub import *
n = node_collection.one({'_id': ObjectId('5752b5392e01310424dcf6cc')})
create_epub(n.name, n.collection_dict)
|
Test export epub script addedfrom gnowsys_ndf.ndf.models import *
from gnowsys_ndf.ndf.views.export_to_epub import *
n = node_collection.one({'_id': ObjectId('5752b5392e01310424dcf6cc')})
create_epub(n.name, n.collection_dict)
|
<commit_before><commit_msg>Test export epub script added<commit_after>from gnowsys_ndf.ndf.models import *
from gnowsys_ndf.ndf.views.export_to_epub import *
n = node_collection.one({'_id': ObjectId('5752b5392e01310424dcf6cc')})
create_epub(n.name, n.collection_dict)
|
|
99c81317c10dbcc737497d7ec89d165e2f63791c
|
defaults.py
|
defaults.py
|
ARDUINO_SENSOR_TYPE="arduino"
SIMULATED_SENSOR_TYPE="simulated"
AUTO_DETERMINE_SENSOR="auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION="local"
UDP_TRANSPORT_TYPE="udp"
#DEFAULT_DESTINATION_PORT="_sensors._udp"
#DEFAULT_DESTINATION_HOST="192.168.56.1"
DEFAULT_DESTINATION_HOST="" # listen on all interfaces
DEFAULT_DESTINATION_PORT=59999
DEFAULT_BAUD=38400
|
ARDUINO_SENSOR_TYPE = "arduino"
SIMULATED_SENSOR_TYPE = "simulated"
AUTO_DETERMINE_SENSOR = "auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION = "local"
UDP_TRANSPORT_TYPE = "udp"
#DEFAULT_DESTINATION_PORT = "_sensors._udp"
#DEFAULT_DESTINATION_HOST = "192.168.56.1"
DEFAULT_DESTINATION_HOST = "" # listen on all interfaces
DEFAULT_DESTINATION_PORT = 59999
DEFAULT_BAUD = 38400
ARDUINO_USB_VENDOR_ID = 8352
ARDUINO_USB_PRODUCT_ID = 16720
|
Add Arduino USB Vendor and product IDs
|
Add Arduino USB Vendor and product IDs
|
Python
|
cc0-1.0
|
edwinsteele/sensorsproject,edwinsteele/sensorsproject
|
ARDUINO_SENSOR_TYPE="arduino"
SIMULATED_SENSOR_TYPE="simulated"
AUTO_DETERMINE_SENSOR="auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION="local"
UDP_TRANSPORT_TYPE="udp"
#DEFAULT_DESTINATION_PORT="_sensors._udp"
#DEFAULT_DESTINATION_HOST="192.168.56.1"
DEFAULT_DESTINATION_HOST="" # listen on all interfaces
DEFAULT_DESTINATION_PORT=59999
DEFAULT_BAUD=38400
Add Arduino USB Vendor and product IDs
|
ARDUINO_SENSOR_TYPE = "arduino"
SIMULATED_SENSOR_TYPE = "simulated"
AUTO_DETERMINE_SENSOR = "auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION = "local"
UDP_TRANSPORT_TYPE = "udp"
#DEFAULT_DESTINATION_PORT = "_sensors._udp"
#DEFAULT_DESTINATION_HOST = "192.168.56.1"
DEFAULT_DESTINATION_HOST = "" # listen on all interfaces
DEFAULT_DESTINATION_PORT = 59999
DEFAULT_BAUD = 38400
ARDUINO_USB_VENDOR_ID = 8352
ARDUINO_USB_PRODUCT_ID = 16720
|
<commit_before>
ARDUINO_SENSOR_TYPE="arduino"
SIMULATED_SENSOR_TYPE="simulated"
AUTO_DETERMINE_SENSOR="auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION="local"
UDP_TRANSPORT_TYPE="udp"
#DEFAULT_DESTINATION_PORT="_sensors._udp"
#DEFAULT_DESTINATION_HOST="192.168.56.1"
DEFAULT_DESTINATION_HOST="" # listen on all interfaces
DEFAULT_DESTINATION_PORT=59999
DEFAULT_BAUD=38400
<commit_msg>Add Arduino USB Vendor and product IDs<commit_after>
|
ARDUINO_SENSOR_TYPE = "arduino"
SIMULATED_SENSOR_TYPE = "simulated"
AUTO_DETERMINE_SENSOR = "auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION = "local"
UDP_TRANSPORT_TYPE = "udp"
#DEFAULT_DESTINATION_PORT = "_sensors._udp"
#DEFAULT_DESTINATION_HOST = "192.168.56.1"
DEFAULT_DESTINATION_HOST = "" # listen on all interfaces
DEFAULT_DESTINATION_PORT = 59999
DEFAULT_BAUD = 38400
ARDUINO_USB_VENDOR_ID = 8352
ARDUINO_USB_PRODUCT_ID = 16720
|
ARDUINO_SENSOR_TYPE="arduino"
SIMULATED_SENSOR_TYPE="simulated"
AUTO_DETERMINE_SENSOR="auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION="local"
UDP_TRANSPORT_TYPE="udp"
#DEFAULT_DESTINATION_PORT="_sensors._udp"
#DEFAULT_DESTINATION_HOST="192.168.56.1"
DEFAULT_DESTINATION_HOST="" # listen on all interfaces
DEFAULT_DESTINATION_PORT=59999
DEFAULT_BAUD=38400
Add Arduino USB Vendor and product IDs
ARDUINO_SENSOR_TYPE = "arduino"
SIMULATED_SENSOR_TYPE = "simulated"
AUTO_DETERMINE_SENSOR = "auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION = "local"
UDP_TRANSPORT_TYPE = "udp"
#DEFAULT_DESTINATION_PORT = "_sensors._udp"
#DEFAULT_DESTINATION_HOST = "192.168.56.1"
DEFAULT_DESTINATION_HOST = "" # listen on all interfaces
DEFAULT_DESTINATION_PORT = 59999
DEFAULT_BAUD = 38400
ARDUINO_USB_VENDOR_ID = 8352
ARDUINO_USB_PRODUCT_ID = 16720
|
<commit_before>
ARDUINO_SENSOR_TYPE="arduino"
SIMULATED_SENSOR_TYPE="simulated"
AUTO_DETERMINE_SENSOR="auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION="local"
UDP_TRANSPORT_TYPE="udp"
#DEFAULT_DESTINATION_PORT="_sensors._udp"
#DEFAULT_DESTINATION_HOST="192.168.56.1"
DEFAULT_DESTINATION_HOST="" # listen on all interfaces
DEFAULT_DESTINATION_PORT=59999
DEFAULT_BAUD=38400
<commit_msg>Add Arduino USB Vendor and product IDs<commit_after>
ARDUINO_SENSOR_TYPE = "arduino"
SIMULATED_SENSOR_TYPE = "simulated"
AUTO_DETERMINE_SENSOR = "auto"
# TODO - put this as a CNAME and an SRV record in DNS instead
IN_PROCESS_DESTINATION = "local"
UDP_TRANSPORT_TYPE = "udp"
#DEFAULT_DESTINATION_PORT = "_sensors._udp"
#DEFAULT_DESTINATION_HOST = "192.168.56.1"
DEFAULT_DESTINATION_HOST = "" # listen on all interfaces
DEFAULT_DESTINATION_PORT = 59999
DEFAULT_BAUD = 38400
ARDUINO_USB_VENDOR_ID = 8352
ARDUINO_USB_PRODUCT_ID = 16720
|
bcf4f7672efce52170b9eb5ce5bad8e3b81f969f
|
tests/test_evaluate.py
|
tests/test_evaluate.py
|
import numpy as np
from numpy.testing import assert_equal
from gala import evaluate as ev
def test_contingency_table():
seg = np.array([0, 1, 1, 1, 2, 2, 2, 3])
gt = np.array([1, 1, 1, 2, 2, 2, 2, 0])
ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[])
ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0])
ctd = ct.todense()
assert_equal(ctd, np.array([[0. , 0.125, 0. ],
[0. , 0.25 , 0.125],
[0. , 0. , 0.375],
[0.125, 0. , 0. ]]))
assert ct.shape == ct0.shape
|
Add test for contingency table fix
|
Add test for contingency table fix
|
Python
|
bsd-3-clause
|
janelia-flyem/gala,jni/gala
|
Add test for contingency table fix
|
import numpy as np
from numpy.testing import assert_equal
from gala import evaluate as ev
def test_contingency_table():
seg = np.array([0, 1, 1, 1, 2, 2, 2, 3])
gt = np.array([1, 1, 1, 2, 2, 2, 2, 0])
ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[])
ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0])
ctd = ct.todense()
assert_equal(ctd, np.array([[0. , 0.125, 0. ],
[0. , 0.25 , 0.125],
[0. , 0. , 0.375],
[0.125, 0. , 0. ]]))
assert ct.shape == ct0.shape
|
<commit_before><commit_msg>Add test for contingency table fix<commit_after>
|
import numpy as np
from numpy.testing import assert_equal
from gala import evaluate as ev
def test_contingency_table():
seg = np.array([0, 1, 1, 1, 2, 2, 2, 3])
gt = np.array([1, 1, 1, 2, 2, 2, 2, 0])
ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[])
ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0])
ctd = ct.todense()
assert_equal(ctd, np.array([[0. , 0.125, 0. ],
[0. , 0.25 , 0.125],
[0. , 0. , 0.375],
[0.125, 0. , 0. ]]))
assert ct.shape == ct0.shape
|
Add test for contingency table fiximport numpy as np
from numpy.testing import assert_equal
from gala import evaluate as ev
def test_contingency_table():
seg = np.array([0, 1, 1, 1, 2, 2, 2, 3])
gt = np.array([1, 1, 1, 2, 2, 2, 2, 0])
ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[])
ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0])
ctd = ct.todense()
assert_equal(ctd, np.array([[0. , 0.125, 0. ],
[0. , 0.25 , 0.125],
[0. , 0. , 0.375],
[0.125, 0. , 0. ]]))
assert ct.shape == ct0.shape
|
<commit_before><commit_msg>Add test for contingency table fix<commit_after>import numpy as np
from numpy.testing import assert_equal
from gala import evaluate as ev
def test_contingency_table():
seg = np.array([0, 1, 1, 1, 2, 2, 2, 3])
gt = np.array([1, 1, 1, 2, 2, 2, 2, 0])
ct = ev.contingency_table(seg, gt, ignore_seg=[], ignore_gt=[])
ct0 = ev.contingency_table(seg, gt, ignore_seg=[0], ignore_gt=[0])
ctd = ct.todense()
assert_equal(ctd, np.array([[0. , 0.125, 0. ],
[0. , 0.25 , 0.125],
[0. , 0. , 0.375],
[0.125, 0. , 0. ]]))
assert ct.shape == ct0.shape
|
|
4e66798eeac6a4c5dd0a6356aba99286b6ac4df7
|
polygraph/types/tests/test_union.py
|
polygraph/types/tests/test_union.py
|
from unittest import TestCase, skip
from polygraph.exceptions import PolygraphValueError
from polygraph.types.basic_type import Union
from polygraph.types.scalar import Float, Int, String
@skip # FIXME
class UnionTypeTest(TestCase):
def test_commutativity(self):
self.assertEqual(Union(String, Int), Union(Int, String))
def test_associativity(self):
self.assertEqual(
Union(Union(String, Int), Float),
Union(String, Int, Float),
)
def test_pipe_operator(self):
self.assertEqual(
String | Int,
Union(String, Int),
)
def test_pipe_operator_with_more_than_two_types(self):
self.assertEqual(
String | Int | Float,
Union(String, Int, Float),
)
class UnionValueTest(TestCase):
def test_valid_type(self):
union = String | Int
self.assertEqual(union(Int(32)), Int(32))
self.assertEqual(union(String("Test")), String("Test"))
def test_value_must_be_typed(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(32)
with self.assertRaises(PolygraphValueError):
union("Test")
def test_value_must_have_right_type(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(Float(32))
|
Add unit tests around the Union type
|
Add unit tests around the Union type
|
Python
|
mit
|
polygraph-python/polygraph
|
Add unit tests around the Union type
|
from unittest import TestCase, skip
from polygraph.exceptions import PolygraphValueError
from polygraph.types.basic_type import Union
from polygraph.types.scalar import Float, Int, String
@skip # FIXME
class UnionTypeTest(TestCase):
def test_commutativity(self):
self.assertEqual(Union(String, Int), Union(Int, String))
def test_associativity(self):
self.assertEqual(
Union(Union(String, Int), Float),
Union(String, Int, Float),
)
def test_pipe_operator(self):
self.assertEqual(
String | Int,
Union(String, Int),
)
def test_pipe_operator_with_more_than_two_types(self):
self.assertEqual(
String | Int | Float,
Union(String, Int, Float),
)
class UnionValueTest(TestCase):
def test_valid_type(self):
union = String | Int
self.assertEqual(union(Int(32)), Int(32))
self.assertEqual(union(String("Test")), String("Test"))
def test_value_must_be_typed(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(32)
with self.assertRaises(PolygraphValueError):
union("Test")
def test_value_must_have_right_type(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(Float(32))
|
<commit_before><commit_msg>Add unit tests around the Union type<commit_after>
|
from unittest import TestCase, skip
from polygraph.exceptions import PolygraphValueError
from polygraph.types.basic_type import Union
from polygraph.types.scalar import Float, Int, String
@skip # FIXME
class UnionTypeTest(TestCase):
def test_commutativity(self):
self.assertEqual(Union(String, Int), Union(Int, String))
def test_associativity(self):
self.assertEqual(
Union(Union(String, Int), Float),
Union(String, Int, Float),
)
def test_pipe_operator(self):
self.assertEqual(
String | Int,
Union(String, Int),
)
def test_pipe_operator_with_more_than_two_types(self):
self.assertEqual(
String | Int | Float,
Union(String, Int, Float),
)
class UnionValueTest(TestCase):
def test_valid_type(self):
union = String | Int
self.assertEqual(union(Int(32)), Int(32))
self.assertEqual(union(String("Test")), String("Test"))
def test_value_must_be_typed(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(32)
with self.assertRaises(PolygraphValueError):
union("Test")
def test_value_must_have_right_type(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(Float(32))
|
Add unit tests around the Union typefrom unittest import TestCase, skip
from polygraph.exceptions import PolygraphValueError
from polygraph.types.basic_type import Union
from polygraph.types.scalar import Float, Int, String
@skip # FIXME
class UnionTypeTest(TestCase):
def test_commutativity(self):
self.assertEqual(Union(String, Int), Union(Int, String))
def test_associativity(self):
self.assertEqual(
Union(Union(String, Int), Float),
Union(String, Int, Float),
)
def test_pipe_operator(self):
self.assertEqual(
String | Int,
Union(String, Int),
)
def test_pipe_operator_with_more_than_two_types(self):
self.assertEqual(
String | Int | Float,
Union(String, Int, Float),
)
class UnionValueTest(TestCase):
def test_valid_type(self):
union = String | Int
self.assertEqual(union(Int(32)), Int(32))
self.assertEqual(union(String("Test")), String("Test"))
def test_value_must_be_typed(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(32)
with self.assertRaises(PolygraphValueError):
union("Test")
def test_value_must_have_right_type(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(Float(32))
|
<commit_before><commit_msg>Add unit tests around the Union type<commit_after>from unittest import TestCase, skip
from polygraph.exceptions import PolygraphValueError
from polygraph.types.basic_type import Union
from polygraph.types.scalar import Float, Int, String
@skip # FIXME
class UnionTypeTest(TestCase):
def test_commutativity(self):
self.assertEqual(Union(String, Int), Union(Int, String))
def test_associativity(self):
self.assertEqual(
Union(Union(String, Int), Float),
Union(String, Int, Float),
)
def test_pipe_operator(self):
self.assertEqual(
String | Int,
Union(String, Int),
)
def test_pipe_operator_with_more_than_two_types(self):
self.assertEqual(
String | Int | Float,
Union(String, Int, Float),
)
class UnionValueTest(TestCase):
def test_valid_type(self):
union = String | Int
self.assertEqual(union(Int(32)), Int(32))
self.assertEqual(union(String("Test")), String("Test"))
def test_value_must_be_typed(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(32)
with self.assertRaises(PolygraphValueError):
union("Test")
def test_value_must_have_right_type(self):
union = String | Int
with self.assertRaises(PolygraphValueError):
union(Float(32))
|
|
47cfefd8a37d7face8f35911c089576450f26ef3
|
tests/sentry/utils/test_cursors.py
|
tests/sentry/utils/test_cursors.py
|
from __future__ import absolute_import
from mock import Mock
from sentry.utils.cursors import build_cursor, Cursor
def build_mock(**attrs):
obj = Mock()
for key, value in attrs.items():
setattr(obj, key, value)
obj.__repr__ = lambda x: repr(attrs)
return obj
def test_build_cursor():
event1 = build_mock(id=1.1, message='one')
event2 = build_mock(id=1.1, message='two')
event3 = build_mock(id=2.1, message='three')
results = [event1, event2, event3]
cursor = build_cursor(results, key='id', limit=1)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert not cursor.prev
assert list(cursor) == [event1]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event2]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event3]
|
Add test exhibiting cursor failure
|
Add test exhibiting cursor failure
|
Python
|
bsd-3-clause
|
gencer/sentry,1tush/sentry,ifduyue/sentry,songyi199111/sentry,fotinakis/sentry,jokey2k/sentry,BayanGroup/sentry,JamesMura/sentry,1tush/sentry,mvaled/sentry,wujuguang/sentry,kevinastone/sentry,gencer/sentry,looker/sentry,ewdurbin/sentry,nicholasserra/sentry,kevinlondon/sentry,boneyao/sentry,daevaorn/sentry,imankulov/sentry,nicholasserra/sentry,jean/sentry,zenefits/sentry,argonemyth/sentry,gg7/sentry,ngonzalvez/sentry,drcapulet/sentry,korealerts1/sentry,imankulov/sentry,daevaorn/sentry,1tush/sentry,jean/sentry,vperron/sentry,kevinastone/sentry,songyi199111/sentry,JTCunning/sentry,looker/sentry,llonchj/sentry,mvaled/sentry,llonchj/sentry,hongliang5623/sentry,looker/sentry,fotinakis/sentry,vperron/sentry,JamesMura/sentry,argonemyth/sentry,wong2/sentry,daevaorn/sentry,drcapulet/sentry,drcapulet/sentry,zenefits/sentry,BuildingLink/sentry,felixbuenemann/sentry,korealerts1/sentry,songyi199111/sentry,JamesMura/sentry,JamesMura/sentry,JTCunning/sentry,korealerts1/sentry,wong2/sentry,jean/sentry,ifduyue/sentry,gencer/sentry,Natim/sentry,looker/sentry,TedaLIEz/sentry,pauloschilling/sentry,Kryz/sentry,alexm92/sentry,kevinastone/sentry,ewdurbin/sentry,ifduyue/sentry,argonemyth/sentry,mvaled/sentry,BayanGroup/sentry,felixbuenemann/sentry,ewdurbin/sentry,llonchj/sentry,gg7/sentry,Kryz/sentry,fuziontech/sentry,Kryz/sentry,hongliang5623/sentry,JamesMura/sentry,looker/sentry,felixbuenemann/sentry,ngonzalvez/sentry,wujuguang/sentry,zenefits/sentry,BuildingLink/sentry,kevinlondon/sentry,vperron/sentry,ngonzalvez/sentry,gg7/sentry,BuildingLink/sentry,fotinakis/sentry,zenefits/sentry,camilonova/sentry,jokey2k/sentry,mvaled/sentry,JackDanger/sentry,BayanGroup/sentry,fuziontech/sentry,boneyao/sentry,camilonova/sentry,fuziontech/sentry,mitsuhiko/sentry,imankulov/sentry,pauloschilling/sentry,BuildingLink/sentry,kevinlondon/sentry,wong2/sentry,pauloschilling/sentry,mvaled/sentry,wujuguang/sentry,Natim/sentry,BuildingLink/sentry,mitsuhiko/sentry,daevaorn/sentry,JackDanger/sentry,jean/sentry,Natim/sentry,alexm92/sentry,zenefits/sentry,gencer/sentry,fotinakis/sentry,ifduyue/sentry,nicholasserra/sentry,ifduyue/sentry,TedaLIEz/sentry,boneyao/sentry,JTCunning/sentry,hongliang5623/sentry,beeftornado/sentry,TedaLIEz/sentry,beeftornado/sentry,mvaled/sentry,camilonova/sentry,JackDanger/sentry,jean/sentry,beeftornado/sentry,gencer/sentry,jokey2k/sentry,alexm92/sentry
|
Add test exhibiting cursor failure
|
from __future__ import absolute_import
from mock import Mock
from sentry.utils.cursors import build_cursor, Cursor
def build_mock(**attrs):
obj = Mock()
for key, value in attrs.items():
setattr(obj, key, value)
obj.__repr__ = lambda x: repr(attrs)
return obj
def test_build_cursor():
event1 = build_mock(id=1.1, message='one')
event2 = build_mock(id=1.1, message='two')
event3 = build_mock(id=2.1, message='three')
results = [event1, event2, event3]
cursor = build_cursor(results, key='id', limit=1)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert not cursor.prev
assert list(cursor) == [event1]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event2]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event3]
|
<commit_before><commit_msg>Add test exhibiting cursor failure<commit_after>
|
from __future__ import absolute_import
from mock import Mock
from sentry.utils.cursors import build_cursor, Cursor
def build_mock(**attrs):
obj = Mock()
for key, value in attrs.items():
setattr(obj, key, value)
obj.__repr__ = lambda x: repr(attrs)
return obj
def test_build_cursor():
event1 = build_mock(id=1.1, message='one')
event2 = build_mock(id=1.1, message='two')
event3 = build_mock(id=2.1, message='three')
results = [event1, event2, event3]
cursor = build_cursor(results, key='id', limit=1)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert not cursor.prev
assert list(cursor) == [event1]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event2]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event3]
|
Add test exhibiting cursor failurefrom __future__ import absolute_import
from mock import Mock
from sentry.utils.cursors import build_cursor, Cursor
def build_mock(**attrs):
obj = Mock()
for key, value in attrs.items():
setattr(obj, key, value)
obj.__repr__ = lambda x: repr(attrs)
return obj
def test_build_cursor():
event1 = build_mock(id=1.1, message='one')
event2 = build_mock(id=1.1, message='two')
event3 = build_mock(id=2.1, message='three')
results = [event1, event2, event3]
cursor = build_cursor(results, key='id', limit=1)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert not cursor.prev
assert list(cursor) == [event1]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event2]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event3]
|
<commit_before><commit_msg>Add test exhibiting cursor failure<commit_after>from __future__ import absolute_import
from mock import Mock
from sentry.utils.cursors import build_cursor, Cursor
def build_mock(**attrs):
obj = Mock()
for key, value in attrs.items():
setattr(obj, key, value)
obj.__repr__ = lambda x: repr(attrs)
return obj
def test_build_cursor():
event1 = build_mock(id=1.1, message='one')
event2 = build_mock(id=1.1, message='two')
event3 = build_mock(id=2.1, message='three')
results = [event1, event2, event3]
cursor = build_cursor(results, key='id', limit=1)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert not cursor.prev
assert list(cursor) == [event1]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event2]
cursor = build_cursor(results, key='id', limit=1, cursor=cursor.next)
assert isinstance(cursor.next, Cursor)
assert cursor.next
assert isinstance(cursor.prev, Cursor)
assert cursor.prev
assert list(cursor) == [event3]
|
|
ba06e9cc8a719c5b362ff63a7ea8aace459fa6a1
|
tests/test_utilities_efficiency.py
|
tests/test_utilities_efficiency.py
|
# -*- coding: utf-8 -*-
import unittest
from brew.utilities.efficiency import calculate_brew_house_yield
from fixtures import recipe
class TestEfficiencyUtilities(unittest.TestCase):
def setUp(self):
self.recipe = recipe
def test_calculate_brew_house_yield(self):
out = calculate_brew_house_yield(recipe.start_volume,
recipe.og,
recipe.grain_additions)
expected = 0.98
self.assertEquals(round(out, 3), expected)
|
Add tests for calculating brew house yield
|
Add tests for calculating brew house yield
|
Python
|
mit
|
chrisgilmerproj/brewday,chrisgilmerproj/brewday
|
Add tests for calculating brew house yield
|
# -*- coding: utf-8 -*-
import unittest
from brew.utilities.efficiency import calculate_brew_house_yield
from fixtures import recipe
class TestEfficiencyUtilities(unittest.TestCase):
def setUp(self):
self.recipe = recipe
def test_calculate_brew_house_yield(self):
out = calculate_brew_house_yield(recipe.start_volume,
recipe.og,
recipe.grain_additions)
expected = 0.98
self.assertEquals(round(out, 3), expected)
|
<commit_before><commit_msg>Add tests for calculating brew house yield<commit_after>
|
# -*- coding: utf-8 -*-
import unittest
from brew.utilities.efficiency import calculate_brew_house_yield
from fixtures import recipe
class TestEfficiencyUtilities(unittest.TestCase):
def setUp(self):
self.recipe = recipe
def test_calculate_brew_house_yield(self):
out = calculate_brew_house_yield(recipe.start_volume,
recipe.og,
recipe.grain_additions)
expected = 0.98
self.assertEquals(round(out, 3), expected)
|
Add tests for calculating brew house yield# -*- coding: utf-8 -*-
import unittest
from brew.utilities.efficiency import calculate_brew_house_yield
from fixtures import recipe
class TestEfficiencyUtilities(unittest.TestCase):
def setUp(self):
self.recipe = recipe
def test_calculate_brew_house_yield(self):
out = calculate_brew_house_yield(recipe.start_volume,
recipe.og,
recipe.grain_additions)
expected = 0.98
self.assertEquals(round(out, 3), expected)
|
<commit_before><commit_msg>Add tests for calculating brew house yield<commit_after># -*- coding: utf-8 -*-
import unittest
from brew.utilities.efficiency import calculate_brew_house_yield
from fixtures import recipe
class TestEfficiencyUtilities(unittest.TestCase):
def setUp(self):
self.recipe = recipe
def test_calculate_brew_house_yield(self):
out = calculate_brew_house_yield(recipe.start_volume,
recipe.og,
recipe.grain_additions)
expected = 0.98
self.assertEquals(round(out, 3), expected)
|
|
a7abc52fd5e2920070f00678cf788d6bcf1e6ed0
|
carl/ratios/__init__.py
|
carl/ratios/__init__.py
|
# -*- coding: utf-8 -*-
#
# Carl is free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
"""Density ratio estimators."""
|
Structure for density ratio estimators
|
Structure for density ratio estimators
|
Python
|
bsd-3-clause
|
diana-hep/carl,diana-hep/carl
|
Structure for density ratio estimators
|
# -*- coding: utf-8 -*-
#
# Carl is free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
"""Density ratio estimators."""
|
<commit_before><commit_msg>Structure for density ratio estimators<commit_after>
|
# -*- coding: utf-8 -*-
#
# Carl is free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
"""Density ratio estimators."""
|
Structure for density ratio estimators# -*- coding: utf-8 -*-
#
# Carl is free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
"""Density ratio estimators."""
|
<commit_before><commit_msg>Structure for density ratio estimators<commit_after># -*- coding: utf-8 -*-
#
# Carl is free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
"""Density ratio estimators."""
|
|
5026583c661158ca7f5326b12a4737609111a741
|
peerinst/tests/test_management_commands.py
|
peerinst/tests/test_management_commands.py
|
import os
import mock
from django.core.management import call_command
from django.db.utils import DatabaseError
from django.test import TestCase
devnull = open(os.devnull, 'w')
@mock.patch("sys.stdout", devnull) # Get rid of output this command prints to user
class SanityCheckTest(TestCase):
def test_sanity_check_command_positive(self):
call_command("sanity_check")
def test_sanity_check_negative(self):
# We want to check if this command fails if there is any connection error
# to the database, so we are patching cursor() call
with mock.patch("django.db.connection.cursor", side_effect=DatabaseError()):
with self.assertRaises(Exception):
call_command("sanity_check")
|
Add tests to sanity check command
|
Add tests to sanity check command
|
Python
|
agpl-3.0
|
open-craft/dalite-ng,open-craft/dalite-ng,open-craft/dalite-ng
|
Add tests to sanity check command
|
import os
import mock
from django.core.management import call_command
from django.db.utils import DatabaseError
from django.test import TestCase
devnull = open(os.devnull, 'w')
@mock.patch("sys.stdout", devnull) # Get rid of output this command prints to user
class SanityCheckTest(TestCase):
def test_sanity_check_command_positive(self):
call_command("sanity_check")
def test_sanity_check_negative(self):
# We want to check if this command fails if there is any connection error
# to the database, so we are patching cursor() call
with mock.patch("django.db.connection.cursor", side_effect=DatabaseError()):
with self.assertRaises(Exception):
call_command("sanity_check")
|
<commit_before><commit_msg>Add tests to sanity check command<commit_after>
|
import os
import mock
from django.core.management import call_command
from django.db.utils import DatabaseError
from django.test import TestCase
devnull = open(os.devnull, 'w')
@mock.patch("sys.stdout", devnull) # Get rid of output this command prints to user
class SanityCheckTest(TestCase):
def test_sanity_check_command_positive(self):
call_command("sanity_check")
def test_sanity_check_negative(self):
# We want to check if this command fails if there is any connection error
# to the database, so we are patching cursor() call
with mock.patch("django.db.connection.cursor", side_effect=DatabaseError()):
with self.assertRaises(Exception):
call_command("sanity_check")
|
Add tests to sanity check command
import os
import mock
from django.core.management import call_command
from django.db.utils import DatabaseError
from django.test import TestCase
devnull = open(os.devnull, 'w')
@mock.patch("sys.stdout", devnull) # Get rid of output this command prints to user
class SanityCheckTest(TestCase):
def test_sanity_check_command_positive(self):
call_command("sanity_check")
def test_sanity_check_negative(self):
# We want to check if this command fails if there is any connection error
# to the database, so we are patching cursor() call
with mock.patch("django.db.connection.cursor", side_effect=DatabaseError()):
with self.assertRaises(Exception):
call_command("sanity_check")
|
<commit_before><commit_msg>Add tests to sanity check command<commit_after>
import os
import mock
from django.core.management import call_command
from django.db.utils import DatabaseError
from django.test import TestCase
devnull = open(os.devnull, 'w')
@mock.patch("sys.stdout", devnull) # Get rid of output this command prints to user
class SanityCheckTest(TestCase):
def test_sanity_check_command_positive(self):
call_command("sanity_check")
def test_sanity_check_negative(self):
# We want to check if this command fails if there is any connection error
# to the database, so we are patching cursor() call
with mock.patch("django.db.connection.cursor", side_effect=DatabaseError()):
with self.assertRaises(Exception):
call_command("sanity_check")
|
|
6182cc9f1b58f96479a46dea03ffe98a6244e7e0
|
viewer_examples/plugins/canny_simple.py
|
viewer_examples/plugins/canny_simple.py
|
from skimage import data
from skimage.filter import canny
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider
from skimage.viewer.plugins.overlayplugin import OverlayPlugin
image = data.camera()
# Note: ImageViewer must be called before Plugin b/c it starts the event loop.
viewer = ImageViewer(image)
# You can create a UI for a filter just by passing a filter function...
plugin = OverlayPlugin(image_filter=canny)
# ... and adding widgets to adjust parameter values.
plugin += Slider('sigma', 0, 5, update_on='release')
plugin += Slider('low threshold', 0, 255, update_on='release')
plugin += Slider('high threshold', 0, 255, update_on='release')
# Finally, attach the plugin to the image viewer.
viewer += plugin
viewer.show()
|
Add example of adding widgets to plugin
|
ENH: Add example of adding widgets to plugin
|
Python
|
bsd-3-clause
|
bennlich/scikit-image,pratapvardhan/scikit-image,emon10005/scikit-image,juliusbierk/scikit-image,Hiyorimi/scikit-image,paalge/scikit-image,emon10005/scikit-image,SamHames/scikit-image,rjeli/scikit-image,dpshelio/scikit-image,paalge/scikit-image,chintak/scikit-image,Midafi/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,Britefury/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,rjeli/scikit-image,almarklein/scikit-image,michaelpacer/scikit-image,almarklein/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,Midafi/scikit-image,jwiggins/scikit-image,robintw/scikit-image,warmspringwinds/scikit-image,keflavich/scikit-image,ajaybhat/scikit-image,michaelpacer/scikit-image,ajaybhat/scikit-image,SamHames/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,bsipocz/scikit-image,jwiggins/scikit-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,blink1073/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,chintak/scikit-image,oew1v07/scikit-image,GaZ3ll3/scikit-image,bennlich/scikit-image,newville/scikit-image,oew1v07/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,ClinicalGraphics/scikit-image,WarrenWeckesser/scikits-image,almarklein/scikit-image,SamHames/scikit-image,robintw/scikit-image,youprofit/scikit-image,youprofit/scikit-image,blink1073/scikit-image,ofgulban/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,Britefury/scikit-image
|
ENH: Add example of adding widgets to plugin
|
from skimage import data
from skimage.filter import canny
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider
from skimage.viewer.plugins.overlayplugin import OverlayPlugin
image = data.camera()
# Note: ImageViewer must be called before Plugin b/c it starts the event loop.
viewer = ImageViewer(image)
# You can create a UI for a filter just by passing a filter function...
plugin = OverlayPlugin(image_filter=canny)
# ... and adding widgets to adjust parameter values.
plugin += Slider('sigma', 0, 5, update_on='release')
plugin += Slider('low threshold', 0, 255, update_on='release')
plugin += Slider('high threshold', 0, 255, update_on='release')
# Finally, attach the plugin to the image viewer.
viewer += plugin
viewer.show()
|
<commit_before><commit_msg>ENH: Add example of adding widgets to plugin<commit_after>
|
from skimage import data
from skimage.filter import canny
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider
from skimage.viewer.plugins.overlayplugin import OverlayPlugin
image = data.camera()
# Note: ImageViewer must be called before Plugin b/c it starts the event loop.
viewer = ImageViewer(image)
# You can create a UI for a filter just by passing a filter function...
plugin = OverlayPlugin(image_filter=canny)
# ... and adding widgets to adjust parameter values.
plugin += Slider('sigma', 0, 5, update_on='release')
plugin += Slider('low threshold', 0, 255, update_on='release')
plugin += Slider('high threshold', 0, 255, update_on='release')
# Finally, attach the plugin to the image viewer.
viewer += plugin
viewer.show()
|
ENH: Add example of adding widgets to pluginfrom skimage import data
from skimage.filter import canny
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider
from skimage.viewer.plugins.overlayplugin import OverlayPlugin
image = data.camera()
# Note: ImageViewer must be called before Plugin b/c it starts the event loop.
viewer = ImageViewer(image)
# You can create a UI for a filter just by passing a filter function...
plugin = OverlayPlugin(image_filter=canny)
# ... and adding widgets to adjust parameter values.
plugin += Slider('sigma', 0, 5, update_on='release')
plugin += Slider('low threshold', 0, 255, update_on='release')
plugin += Slider('high threshold', 0, 255, update_on='release')
# Finally, attach the plugin to the image viewer.
viewer += plugin
viewer.show()
|
<commit_before><commit_msg>ENH: Add example of adding widgets to plugin<commit_after>from skimage import data
from skimage.filter import canny
from skimage.viewer import ImageViewer
from skimage.viewer.widgets import Slider
from skimage.viewer.plugins.overlayplugin import OverlayPlugin
image = data.camera()
# Note: ImageViewer must be called before Plugin b/c it starts the event loop.
viewer = ImageViewer(image)
# You can create a UI for a filter just by passing a filter function...
plugin = OverlayPlugin(image_filter=canny)
# ... and adding widgets to adjust parameter values.
plugin += Slider('sigma', 0, 5, update_on='release')
plugin += Slider('low threshold', 0, 255, update_on='release')
plugin += Slider('high threshold', 0, 255, update_on='release')
# Finally, attach the plugin to the image viewer.
viewer += plugin
viewer.show()
|
|
952f15762e083d55e8d56e09a40c2b5470bd8e0d
|
tools/send_github_payloads.py
|
tools/send_github_payloads.py
|
#!/usr/bin/python
import sys
import os
import simplejson
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'api'))
import zulip
zulip_client = zulip.Client(site="http://localhost:9991")
payload_dir = "zerver/fixtures/github"
for filename in os.listdir(payload_dir):
with open(os.path.join(payload_dir, filename)) as f:
req = simplejson.loads(f.read())
req['api-key'] = zulip_client.api_key
req['email'] = zulip_client.email
zulip_client.do_api_query(req, zulip.API_VERSTRING + "external/github")
|
Add tool for sending Github payloads to a local instance
|
Add tool for sending Github payloads to a local instance
This is very useful for testing our Github integration.
(imported from commit c61fd883c599395d31416a25090e57594fddeadf)
|
Python
|
apache-2.0
|
Suninus/zulip,xuanhan863/zulip,bitemyapp/zulip,jphilipsen05/zulip,mahim97/zulip,amanharitsh123/zulip,zorojean/zulip,zacps/zulip,ApsOps/zulip,eeshangarg/zulip,Cheppers/zulip,andersk/zulip,TigorC/zulip,Suninus/zulip,ipernet/zulip,EasonYi/zulip,eastlhu/zulip,KJin99/zulip,pradiptad/zulip,qq1012803704/zulip,suxinde2009/zulip,jerryge/zulip,Galexrt/zulip,Suninus/zulip,brainwane/zulip,KingxBanana/zulip,hustlzp/zulip,saitodisse/zulip,ApsOps/zulip,codeKonami/zulip,hackerkid/zulip,technicalpickles/zulip,technicalpickles/zulip,firstblade/zulip,themass/zulip,moria/zulip,guiquanz/zulip,joshisa/zulip,vikas-parashar/zulip,jrowan/zulip,hengqujushi/zulip,dxq-git/zulip,voidException/zulip,hustlzp/zulip,umkay/zulip,ryanbackman/zulip,cosmicAsymmetry/zulip,voidException/zulip,hosaka/zulip,proliming/zulip,jainayush975/zulip,andersk/zulip,brainwane/zulip,bastianh/zulip,jainayush975/zulip,kou/zulip,xuxiao/zulip,zachallaun/zulip,johnnygaddarr/zulip,littledogboy/zulip,mohsenSy/zulip,zhaoweigg/zulip,Cheppers/zulip,TigorC/zulip,jmarcelino/zulip,HenrikSolver/zulip,zulip/zulip,luyifan/zulip,hustlzp/zulip,blaze225/zulip,Qgap/zulip,babbage/zulip,hafeez3000/zulip,shrikrishnaholla/zulip,verma-varsha/zulip,SmartPeople/zulip,zhaoweigg/zulip,Galexrt/zulip,amallia/zulip,thomasboyt/zulip,wdaher/zulip,suxinde2009/zulip,rht/zulip,jonesgithub/zulip,tommyip/zulip,m1ssou/zulip,verma-varsha/zulip,sonali0901/zulip,ipernet/zulip,dwrpayne/zulip,shaunstanislaus/zulip,LAndreas/zulip,kaiyuanheshang/zulip,avastu/zulip,eastlhu/zulip,hafeez3000/zulip,arpitpanwar/zulip,proliming/zulip,esander91/zulip,luyifan/zulip,xuanhan863/zulip,gigawhitlocks/zulip,Vallher/zulip,jeffcao/zulip,alliejones/zulip,technicalpickles/zulip,saitodisse/zulip,wangdeshui/zulip,Juanvulcano/zulip,jonesgithub/zulip,KJin99/zulip,SmartPeople/zulip,zwily/zulip,christi3k/zulip,moria/zulip,esander91/zulip,sharmaeklavya2/zulip,jackrzhang/zulip,johnny9/zulip,dawran6/zulip,umkay/zulip,shubhamdhama/zulip,ufosky-server/zulip,he15his/zulip,m1ssou/zulip,AZtheAsian/zulip,dwrpayne/zulip,hengqujushi/zulip,deer-hope/zulip,DazWorrall/zulip,rht/zulip,dnmfarrell/zulip,atomic-labs/zulip,PhilSk/zulip,dhcrzf/zulip,paxapy/zulip,glovebx/zulip,peiwei/zulip,themass/zulip,tbutter/zulip,vaidap/zulip,alliejones/zulip,babbage/zulip,lfranchi/zulip,jeffcao/zulip,vaidap/zulip,hustlzp/zulip,JanzTam/zulip,EasonYi/zulip,shrikrishnaholla/zulip,suxinde2009/zulip,synicalsyntax/zulip,mahim97/zulip,yocome/zulip,tommyip/zulip,yocome/zulip,johnnygaddarr/zulip,levixie/zulip,Qgap/zulip,adnanh/zulip,guiquanz/zulip,paxapy/zulip,xuxiao/zulip,zachallaun/zulip,deer-hope/zulip,dotcool/zulip,littledogboy/zulip,ufosky-server/zulip,gkotian/zulip,johnnygaddarr/zulip,gigawhitlocks/zulip,showell/zulip,deer-hope/zulip,jphilipsen05/zulip,yuvipanda/zulip,umkay/zulip,kou/zulip,vakila/zulip,fw1121/zulip,itnihao/zulip,eastlhu/zulip,JanzTam/zulip,EasonYi/zulip,shrikrishnaholla/zulip,tdr130/zulip,zofuthan/zulip,wdaher/zulip,souravbadami/zulip,mdavid/zulip,Cheppers/zulip,arpith/zulip,krtkmj/zulip,JPJPJPOPOP/zulip,LeeRisk/zulip,pradiptad/zulip,pradiptad/zulip,codeKonami/zulip,dotcool/zulip,LAndreas/zulip,DazWorrall/zulip,proliming/zulip,jessedhillon/zulip,praveenaki/zulip,shubhamdhama/zulip,ericzhou2008/zulip,rishig/zulip,bastianh/zulip,PaulPetring/zulip,JanzTam/zulip,atomic-labs/zulip,wavelets/zulip,zwily/zulip,cosmicAsymmetry/zulip,aps-sids/zulip,brainwane/zulip,schatt/zulip,bitemyapp/zulip,joyhchen/zulip,bowlofstew/zulip,tiansiyuan/zulip,ApsOps/zulip,atomic-labs/zulip,akuseru/zulip,timabbott/zulip,wavelets/zulip,KJin99/zulip,zulip/zulip,Batterfii/zulip,vabs22/zulip,ipernet/zulip,andersk/zulip,JPJPJPOPOP/zulip,TigorC/zulip,peguin40/zulip,KingxBanana/zulip,showell/zulip,paxapy/zulip,reyha/zulip,niftynei/zulip,timabbott/zulip,calvinleenyc/zulip,jessedhillon/zulip,stamhe/zulip,tbutter/zulip,timabbott/zulip,esander91/zulip,fw1121/zulip,codeKonami/zulip,Galexrt/zulip,voidException/zulip,aps-sids/zulip,wweiradio/zulip,Galexrt/zulip,arpitpanwar/zulip,noroot/zulip,susansls/zulip,hustlzp/zulip,susansls/zulip,mansilladev/zulip,jonesgithub/zulip,JPJPJPOPOP/zulip,bastianh/zulip,punchagan/zulip,Frouk/zulip,praveenaki/zulip,karamcnair/zulip,wavelets/zulip,eeshangarg/zulip,MariaFaBella85/zulip,easyfmxu/zulip,brainwane/zulip,qq1012803704/zulip,dattatreya303/zulip,bastianh/zulip,gigawhitlocks/zulip,wangdeshui/zulip,shrikrishnaholla/zulip,souravbadami/zulip,yocome/zulip,zwily/zulip,jeffcao/zulip,sonali0901/zulip,m1ssou/zulip,j831/zulip,noroot/zulip,sup95/zulip,shaunstanislaus/zulip,huangkebo/zulip,joyhchen/zulip,arpitpanwar/zulip,hackerkid/zulip,susansls/zulip,udxxabp/zulip,glovebx/zulip,vikas-parashar/zulip,zwily/zulip,wdaher/zulip,bssrdf/zulip,mansilladev/zulip,sharmaeklavya2/zulip,glovebx/zulip,dwrpayne/zulip,JanzTam/zulip,swinghu/zulip,DazWorrall/zulip,vikas-parashar/zulip,gkotian/zulip,Cheppers/zulip,babbage/zulip,MayB/zulip,blaze225/zulip,peguin40/zulip,bssrdf/zulip,shrikrishnaholla/zulip,wdaher/zulip,xuxiao/zulip,Vallher/zulip,Juanvulcano/zulip,udxxabp/zulip,swinghu/zulip,hayderimran7/zulip,arpitpanwar/zulip,akuseru/zulip,lfranchi/zulip,ikasumiwt/zulip,amanharitsh123/zulip,Batterfii/zulip,isht3/zulip,themass/zulip,grave-w-grave/zulip,KingxBanana/zulip,reyha/zulip,kaiyuanheshang/zulip,reyha/zulip,Batterfii/zulip,eeshangarg/zulip,ryansnowboarder/zulip,willingc/zulip,guiquanz/zulip,calvinleenyc/zulip,huangkebo/zulip,jessedhillon/zulip,SmartPeople/zulip,RobotCaleb/zulip,niftynei/zulip,RobotCaleb/zulip,karamcnair/zulip,qq1012803704/zulip,dotcool/zulip,kou/zulip,so0k/zulip,paxapy/zulip,jonesgithub/zulip,adnanh/zulip,xuanhan863/zulip,willingc/zulip,he15his/zulip,avastu/zulip,developerfm/zulip,pradiptad/zulip,adnanh/zulip,dattatreya303/zulip,TigorC/zulip,dotcool/zulip,easyfmxu/zulip,EasonYi/zulip,synicalsyntax/zulip,grave-w-grave/zulip,ikasumiwt/zulip,karamcnair/zulip,saitodisse/zulip,technicalpickles/zulip,zofuthan/zulip,vaidap/zulip,cosmicAsymmetry/zulip,he15his/zulip,vaidap/zulip,ufosky-server/zulip,johnny9/zulip,j831/zulip,aps-sids/zulip,grave-w-grave/zulip,moria/zulip,sharmaeklavya2/zulip,kou/zulip,so0k/zulip,aakash-cr7/zulip,wweiradio/zulip,isht3/zulip,Juanvulcano/zulip,ryansnowboarder/zulip,atomic-labs/zulip,sharmaeklavya2/zulip,itnihao/zulip,aliceriot/zulip,PhilSk/zulip,dhcrzf/zulip,seapasulli/zulip,Vallher/zulip,natanovia/zulip,rishig/zulip,developerfm/zulip,ahmadassaf/zulip,LeeRisk/zulip,glovebx/zulip,natanovia/zulip,luyifan/zulip,ashwinirudrappa/zulip,tbutter/zulip,MayB/zulip,glovebx/zulip,amallia/zulip,aps-sids/zulip,aakash-cr7/zulip,amallia/zulip,wangdeshui/zulip,gkotian/zulip,shaunstanislaus/zulip,AZtheAsian/zulip,kokoar/zulip,gkotian/zulip,Drooids/zulip,tiansiyuan/zulip,umkay/zulip,ryansnowboarder/zulip,shrikrishnaholla/zulip,ufosky-server/zulip,bluesea/zulip,seapasulli/zulip,Gabriel0402/zulip,MariaFaBella85/zulip,jrowan/zulip,DazWorrall/zulip,jerryge/zulip,timabbott/zulip,wweiradio/zulip,umkay/zulip,itnihao/zulip,hackerkid/zulip,schatt/zulip,Gabriel0402/zulip,stamhe/zulip,jainayush975/zulip,tdr130/zulip,aakash-cr7/zulip,susansls/zulip,huangkebo/zulip,timabbott/zulip,mdavid/zulip,itnihao/zulip,jphilipsen05/zulip,sup95/zulip,xuanhan863/zulip,EasonYi/zulip,arpith/zulip,mansilladev/zulip,wavelets/zulip,ipernet/zulip,aps-sids/zulip,dnmfarrell/zulip,jessedhillon/zulip,christi3k/zulip,dnmfarrell/zulip,so0k/zulip,rishig/zulip,grave-w-grave/zulip,natanovia/zulip,jackrzhang/zulip,ryanbackman/zulip,zulip/zulip,jimmy54/zulip,zwily/zulip,swinghu/zulip,ahmadassaf/zulip,gigawhitlocks/zulip,yuvipanda/zulip,arpitpanwar/zulip,ericzhou2008/zulip,noroot/zulip,SmartPeople/zulip,isht3/zulip,pradiptad/zulip,vikas-parashar/zulip,johnnygaddarr/zulip,Jianchun1/zulip,levixie/zulip,ahmadassaf/zulip,schatt/zulip,eastlhu/zulip,natanovia/zulip,dattatreya303/zulip,jackrzhang/zulip,hengqujushi/zulip,stamhe/zulip,gkotian/zulip,deer-hope/zulip,armooo/zulip,peguin40/zulip,dnmfarrell/zulip,hustlzp/zulip,Jianchun1/zulip,kokoar/zulip,firstblade/zulip,jerryge/zulip,samatdav/zulip,Diptanshu8/zulip,jerryge/zulip,mansilladev/zulip,sup95/zulip,KingxBanana/zulip,joyhchen/zulip,he15his/zulip,Suninus/zulip,dxq-git/zulip,reyha/zulip,itnihao/zulip,swinghu/zulip,ikasumiwt/zulip,saitodisse/zulip,yocome/zulip,rht/zulip,themass/zulip,Frouk/zulip,christi3k/zulip,verma-varsha/zulip,umkay/zulip,dawran6/zulip,xuxiao/zulip,PhilSk/zulip,seapasulli/zulip,Qgap/zulip,tdr130/zulip,blaze225/zulip,gkotian/zulip,MayB/zulip,dnmfarrell/zulip,JPJPJPOPOP/zulip,zachallaun/zulip,proliming/zulip,willingc/zulip,huangkebo/zulip,wangdeshui/zulip,tommyip/zulip,armooo/zulip,Jianchun1/zulip,ikasumiwt/zulip,dhcrzf/zulip,ikasumiwt/zulip,bitemyapp/zulip,krtkmj/zulip,cosmicAsymmetry/zulip,bowlofstew/zulip,hj3938/zulip,noroot/zulip,samatdav/zulip,zwily/zulip,zorojean/zulip,cosmicAsymmetry/zulip,suxinde2009/zulip,amallia/zulip,bssrdf/zulip,kokoar/zulip,samatdav/zulip,avastu/zulip,bssrdf/zulip,zachallaun/zulip,verma-varsha/zulip,pradiptad/zulip,proliming/zulip,fw1121/zulip,moria/z
ulip,LeeRisk/zulip,armooo/zulip,JanzTam/zulip,schatt/zulip,zacps/zulip,firstblade/zulip,Gabriel0402/zulip,amallia/zulip,blaze225/zulip,babbage/zulip,avastu/zulip,joyhchen/zulip,MayB/zulip,arpitpanwar/zulip,jimmy54/zulip,stamhe/zulip,proliming/zulip,j831/zulip,ikasumiwt/zulip,reyha/zulip,souravbadami/zulip,vikas-parashar/zulip,ipernet/zulip,jessedhillon/zulip,ericzhou2008/zulip,bowlofstew/zulip,guiquanz/zulip,fw1121/zulip,peguin40/zulip,punchagan/zulip,shrikrishnaholla/zulip,bitemyapp/zulip,RobotCaleb/zulip,tbutter/zulip,ashwinirudrappa/zulip,willingc/zulip,zofuthan/zulip,EasonYi/zulip,moria/zulip,eeshangarg/zulip,brockwhittaker/zulip,stamhe/zulip,littledogboy/zulip,joshisa/zulip,atomic-labs/zulip,hackerkid/zulip,KingxBanana/zulip,Qgap/zulip,mahim97/zulip,saitodisse/zulip,EasonYi/zulip,shaunstanislaus/zulip,zhaoweigg/zulip,praveenaki/zulip,codeKonami/zulip,kaiyuanheshang/zulip,levixie/zulip,littledogboy/zulip,udxxabp/zulip,amanharitsh123/zulip,timabbott/zulip,hengqujushi/zulip,j831/zulip,tommyip/zulip,dxq-git/zulip,brockwhittaker/zulip,timabbott/zulip,synicalsyntax/zulip,Vallher/zulip,peiwei/zulip,synicalsyntax/zulip,aliceriot/zulip,mohsenSy/zulip,andersk/zulip,tiansiyuan/zulip,amanharitsh123/zulip,jonesgithub/zulip,akuseru/zulip,j831/zulip,Jianchun1/zulip,dnmfarrell/zulip,calvinleenyc/zulip,zacps/zulip,RobotCaleb/zulip,littledogboy/zulip,blaze225/zulip,noroot/zulip,stamhe/zulip,dxq-git/zulip,adnanh/zulip,zofuthan/zulip,luyifan/zulip,huangkebo/zulip,johnny9/zulip,hengqujushi/zulip,noroot/zulip,seapasulli/zulip,themass/zulip,zhaoweigg/zulip,bowlofstew/zulip,KJin99/zulip,KingxBanana/zulip,calvinleenyc/zulip,m1ssou/zulip,Frouk/zulip,bssrdf/zulip,RobotCaleb/zulip,amanharitsh123/zulip,vakila/zulip,nicholasbs/zulip,verma-varsha/zulip,wweiradio/zulip,hayderimran7/zulip,xuxiao/zulip,willingc/zulip,joyhchen/zulip,hayderimran7/zulip,rht/zulip,jonesgithub/zulip,joyhchen/zulip,Galexrt/zulip,ryanbackman/zulip,Diptanshu8/zulip,aliceriot/zulip,hackerkid/zulip,verma-varsha/zulip,saitodisse/zulip,xuxiao/zulip,peguin40/zulip,Drooids/zulip,atomic-labs/zulip,levixie/zulip,samatdav/zulip,Frouk/zulip,kaiyuanheshang/zulip,hj3938/zulip,ryanbackman/zulip,amyliu345/zulip,dawran6/zulip,eastlhu/zulip,joshisa/zulip,Frouk/zulip,Juanvulcano/zulip,codeKonami/zulip,udxxabp/zulip,rishig/zulip,vakila/zulip,zofuthan/zulip,tbutter/zulip,hustlzp/zulip,eastlhu/zulip,ApsOps/zulip,eeshangarg/zulip,KJin99/zulip,seapasulli/zulip,voidException/zulip,qq1012803704/zulip,MariaFaBella85/zulip,kou/zulip,tiansiyuan/zulip,souravbadami/zulip,gigawhitlocks/zulip,j831/zulip,guiquanz/zulip,schatt/zulip,wdaher/zulip,suxinde2009/zulip,dawran6/zulip,jainayush975/zulip,kou/zulip,zachallaun/zulip,sup95/zulip,sonali0901/zulip,Batterfii/zulip,brainwane/zulip,nicholasbs/zulip,bssrdf/zulip,mahim97/zulip,gigawhitlocks/zulip,mansilladev/zulip,vakila/zulip,fw1121/zulip,lfranchi/zulip,Suninus/zulip,esander91/zulip,ashwinirudrappa/zulip,jonesgithub/zulip,KJin99/zulip,christi3k/zulip,mdavid/zulip,peiwei/zulip,m1ssou/zulip,SmartPeople/zulip,PhilSk/zulip,wavelets/zulip,niftynei/zulip,m1ssou/zulip,LAndreas/zulip,nicholasbs/zulip,esander91/zulip,praveenaki/zulip,jimmy54/zulip,rht/zulip,hayderimran7/zulip,akuseru/zulip,jackrzhang/zulip,jackrzhang/zulip,calvinleenyc/zulip,samatdav/zulip,Galexrt/zulip,jeffcao/zulip,johnny9/zulip,aliceriot/zulip,bitemyapp/zulip,jessedhillon/zulip,bluesea/zulip,ryansnowboarder/zulip,hj3938/zulip,ApsOps/zulip,hj3938/zulip,he15his/zulip,technicalpickles/zulip,mahim97/zulip,LAndreas/zulip,paxapy/zulip,amyliu345/zulip,jessedhillon/zulip,ha
feez3000/zulip,KJin99/zulip,noroot/zulip,Suninus/zulip,rishig/zulip,ericzhou2008/zulip,wdaher/zulip,zachallaun/zulip,bluesea/zulip,akuseru/zulip,shaunstanislaus/zulip,aakash-cr7/zulip,zhaoweigg/zulip,samatdav/zulip,tommyip/zulip,vaidap/zulip,willingc/zulip,adnanh/zulip,Gabriel0402/zulip,yocome/zulip,dawran6/zulip,sonali0901/zulip,tdr130/zulip,gkotian/zulip,thomasboyt/zulip,Diptanshu8/zulip,ApsOps/zulip,dhcrzf/zulip,JanzTam/zulip,zulip/zulip,PaulPetring/zulip,kaiyuanheshang/zulip,arpith/zulip,babbage/zulip,susansls/zulip,zulip/zulip,Drooids/zulip,Galexrt/zulip,qq1012803704/zulip,mdavid/zulip,amallia/zulip,ashwinirudrappa/zulip,zulip/zulip,dattatreya303/zulip,rishig/zulip,PhilSk/zulip,niftynei/zulip,zacps/zulip,shubhamdhama/zulip,glovebx/zulip,tbutter/zulip,isht3/zulip,voidException/zulip,PaulPetring/zulip,Qgap/zulip,MariaFaBella85/zulip,LAndreas/zulip,krtkmj/zulip,voidException/zulip,jimmy54/zulip,tommyip/zulip,ipernet/zulip,MayB/zulip,yuvipanda/zulip,vabs22/zulip,zorojean/zulip,andersk/zulip,AZtheAsian/zulip,lfranchi/zulip,JPJPJPOPOP/zulip,so0k/zulip,yuvipanda/zulip,kokoar/zulip,jerryge/zulip,vaidap/zulip,so0k/zulip,swinghu/zulip,hengqujushi/zulip,thomasboyt/zulip,jphilipsen05/zulip,brainwane/zulip,dwrpayne/zulip,kokoar/zulip,hafeez3000/zulip,Jianchun1/zulip,JanzTam/zulip,arpitpanwar/zulip,ryansnowboarder/zulip,christi3k/zulip,AZtheAsian/zulip,LeeRisk/zulip,schatt/zulip,adnanh/zulip,nicholasbs/zulip,ipernet/zulip,nicholasbs/zulip,hackerkid/zulip,punchagan/zulip,amyliu345/zulip,vabs22/zulip,TigorC/zulip,xuxiao/zulip,LAndreas/zulip,kokoar/zulip,vabs22/zulip,so0k/zulip,jeffcao/zulip,Jianchun1/zulip,ufosky-server/zulip,blaze225/zulip,zwily/zulip,zofuthan/zulip,bastianh/zulip,eeshangarg/zulip,stamhe/zulip,so0k/zulip,alliejones/zulip,glovebx/zulip,ryansnowboarder/zulip,MayB/zulip,hafeez3000/zulip,shubhamdhama/zulip,jphilipsen05/zulip,udxxabp/zulip,sharmaeklavya2/zulip,zacps/zulip,christi3k/zulip,vakila/zulip,Frouk/zulip,JanzTam/zulip,showell/zulip,showell/zulip,thomasboyt/zulip,hj3938/zulip,jainayush975/zulip,jrowan/zulip,bowlofstew/zulip,arpith/zulip,sonali0901/zulip,akuseru/zulip,Drooids/zulip,yocome/zulip,LAndreas/zulip,shubhamdhama/zulip,johnny9/zulip
|
b5d367c172a8bdb3a9c165041a63ffe569c2c28b
|
examples/xml_to_json.py
|
examples/xml_to_json.py
|
#!/usr/bin/env python
import os
import sys
import cybox.bindings.cybox_core_1_0 as core_binding
from cybox.core import Observables
def from_file(filename):
cybox_obj = core_binding.parse(os.path.abspath(filename))
return Observables.from_obj(cybox_obj)
def main():
if len(sys.argv) < 2:
print "Argument required"
return
print from_file(sys.argv[1]).to_json()
if __name__ == "__main__":
main()
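The converter can also be used programmatically; a minimal sketch, assuming the module above is importable as xml_to_json (the import name and input path are illustrative):
# Assumes the module above is on the import path; the filename is hypothetical.
from xml_to_json import from_file
print from_file("observables.xml").to_json()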
|
Add script to convert CybOX XML to CybOX JSON.
|
Add script to convert CybOX XML to CybOX JSON.
|
Python
|
bsd-3-clause
|
CybOXProject/python-cybox
|
4f369b62be57052e534ee48015a383db6c3c7468
|
util/web/websockets.py
|
util/web/websockets.py
|
# Oxypanel
# File: util/web/websockets.py
# Desc: helpers to make authed websocket requests
from uuid import uuid4
import config
from app import redis_client
from util.web.user import get_current_user
def make_websocket_request(websocket, websocket_data):
# Generate request key
request_key = str(uuid4())
user = get_current_user()
# Add Redis hash set for websocket processor
redis_client.hmset(
'{0}{1}'.format(config.REDIS['WEBSOCKET_REQUEST_PREFIX'], request_key), {
'user_id': user.id,
'websocket': websocket,
'websocket_data': websocket_data
}
)
return request_key
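A sketch of the intended round trip: a view stores the request and hands the key to the browser, and the websocket processor later resolves it. The websocket name and payload below are illustrative; the prefix lookup mirrors the code above.
# Producer side (e.g. in a request handler): returns a key for the client.
request_key = make_websocket_request('device_log', {'device_id': 42})
# Consumer side (websocket processor): resolve the key back to its data.
stored = redis_client.hgetall(
    '{0}{1}'.format(config.REDIS['WEBSOCKET_REQUEST_PREFIX'], request_key)
)
user_id = stored['user_id']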
|
Add helpers to create websocket requests
|
Add helpers to create websocket requests
|
Python
|
mit
|
oxyio/oxyio,oxyio/oxyio,oxyio/oxyio,oxyio/oxyio
|
c8c616dffcce4a8083e3415607b07da6ae1adc7a
|
2019/aoc2019/day13.py
|
2019/aoc2019/day13.py
|
from typing import TextIO
from aoc2019.intcode import Computer, read_program
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
return sum(1 for val in screen.values() if val == 2)
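part1 accepts any open text stream containing the Intcode program; a sketch with a hypothetical input path:
# Hypothetical puzzle-input location.
with open("inputs/day13.txt") as data:
    print(part1(data))  # number of block tiles (tile id 2) on the screen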
|
Implement 2019 day 13 part 1
|
Implement 2019 day 13 part 1
|
Python
|
mit
|
bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode
|
a7777a049dab6e3d4b99f8259ebfa39a31d32ae8
|
IPython/lib/tests/test_clipboard.py
|
IPython/lib/tests/test_clipboard.py
|
import nose.tools as nt
from IPython.core.error import TryNext
from IPython.lib.clipboard import ClipboardEmpty
from IPython.utils.py3compat import unicode_type
def test_clipboard_get():
# Smoketest for clipboard access - we can't easily guarantee that the
# clipboard is accessible and has something on it, but this tries to
# exercise the relevant code anyway.
try:
a = get_ipython().hooks.clipboard_get()
except ClipboardEmpty:
# Nothing in clipboard to get
pass
except TryNext:
# No clipboard access API available
pass
else:
nt.assert_is_instance(a, unicode_type)
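The same hook can be poked at interactively; a sketch that only makes sense inside a running IPython session, where get_ipython() is injected into the namespace:
# Mirrors the fallbacks exercised by the test above.
try:
    text = get_ipython().hooks.clipboard_get()
except (ClipboardEmpty, TryNext):
    text = None  # clipboard empty, or no access API on this platform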
|
Add simple smoketest for clipboard access
|
Add simple smoketest for clipboard access
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
d01cae37ecb8f52814dee98ad27aa91b6b9be8c9
|
animerec/user_info.py
|
animerec/user_info.py
|
def get_user_info(username):
""" Retrieves information via MyAnimeList's API about a user's viewed anime and the
corresponding ratings. It only takes into account non-zero ratings.
Parameters:
-----------
username : string
Username of the MyAnimeList user whose data to pull.
Returns:
--------
seen_id, seen_ratings : (List of ints, List of ints)
seen_id: a list of the anime_ids which the user has seen
seen_ratings: a list of the ratings for each corresponding title.
"""
#First, get XML data based on username
import requests
query = 'https://myanimelist.net/malappinfo.php?u=%s&status=all&type=anime' % username
r = requests.get(query)
if r.status_code != requests.codes.ok:
print ("Error processing request. Try again")
import sys; sys.exit()
#Now, parse XML data
from lxml import etree
doc = etree.fromstring(r.content)
ids = doc.xpath('.//series_animedb_id/text()')
ratings = doc.xpath('.//my_score/text()')
#Now take the data and construct a rating for them.
from itertools import compress
mask = [rating != '0' for rating in ratings]
seen_id = list(map(int, compress(ids, mask)))
seen_ratings = list(map(int, compress(ratings, mask)))
return seen_id, seen_ratings
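Calling it is straightforward; a sketch with a made-up username (needs network access to myanimelist.net):
seen_id, seen_ratings = get_user_info("example_user")
for anime_id, rating in zip(seen_id, seen_ratings):
    print("anime %d rated %d" % (anime_id, rating))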
|
Use MAL API to get watched show information.
|
Use MAL API to get watched show information.
|
Python
|
mit
|
vishaalprasad/AnimeRecommendation
|
0663abd5e89bc7101fc1cfc803e7ffbfd5432af0
|
test/test_exc.py
|
test/test_exc.py
|
import unittest
from u2flib_host import exc
class APDUErrorTest(unittest.TestCase):
def test_init(self):
error = exc.APDUError(0x3039)
self.assertEqual(error.args[0], '0x3039')
self.assertEqual(error.code, 0x3039)
self.assertEqual(error.sw1, 0x30)
self.assertEqual(error.sw2, 0x39)
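The expected values follow the usual APDU convention that the assertions pin down: the 16-bit status word splits into a high status byte (sw1) and a low one (sw2). Worked out directly:
code = 0x3039
sw1 = code >> 8    # 0x30, high byte of the status word
sw2 = code & 0xFF  # 0x39, low byte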
|
Add rudimentary unit test for APDUError
|
Add rudimentary unit test for APDUError
|
Python
|
bsd-2-clause
|
Yubico/python-u2flib-host
|
9c7ec15d6347d915cd0285927ed651d4ae6508c1
|
amen/time.py
|
amen/time.py
|
#!/usr/bin/env python
import six
import numpy as np
import pandas as pd
class TimeSlice(object):
"""
A slice of time: has a start time, a duration, and a reference to an Audio object.
"""
def __init__(self, time, duration, audio, unit='s'):
self.time = pd.to_timedelta(time, unit=unit)
self.duration = pd.to_timedelta(duration, unit=unit)
self.audio = audio
def __repr__(self):
args = self.time, self.duration
return '<TimeSlice, start:{0:.2f}, duration:{1:.2f}'.format(*args)
class TimingList(list):
"""
A list of TimeSlices.
"""
def __init__(self, name, timings, audio, unit='s'):
# This assumes that we're going to get a list of tuples (start, duration) from librosa,
# which may or may not be true.
self.name = name
for (start, duration) in timings:
slice = TimeSlice(start, duration, audio, unit=unit)
self.append(slice)
|
#!/usr/bin/env python
import six
import numpy as np
import pandas as pd
class TimeSlice(object):
"""
A slice of time: has a start time, a duration, and a reference to an Audio object.
"""
def __init__(self, time, duration, audio, unit='s'):
self.time = pd.to_timedelta(time, unit=unit)
self.duration = pd.to_timedelta(duration, unit=unit)
self.audio = audio
def __repr__(self):
args = self.time.delta / 1000000000.0, self.duration.delta / 1000000000.0
return '<TimeSlice, start: {0:.2f}, duration: {1:.2f}'.format(*args)
class TimingList(list):
"""
A list of TimeSlices.
"""
def __init__(self, name, timings, audio, unit='s'):
# This assumes that we're going to get a list of tuples (start, duration) from librosa,
# which may or may not be true.
self.name = name
for (start, duration) in timings:
slice = TimeSlice(start, duration, audio, unit=unit)
self.append(slice)
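The fix works because a pandas Timedelta cannot be fed straight to a {0:.2f} float format; .delta is the span in integer nanoseconds, so dividing by 1e9 recovers seconds as a plain float. A quick check of the conversion, using the same .delta attribute as the code above:
import pandas as pd
td = pd.to_timedelta(1.5, unit='s')
seconds = td.delta / 1000000000.0  # .delta is nanoseconds, so this is 1.5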
|
Fix formatting bug in repr
|
Fix formatting bug in repr
|
Python
|
bsd-2-clause
|
algorithmic-music-exploration/amen,algorithmic-music-exploration/amen
|
a15afdd02582267a4d9cb6487f7d3e2fe15d0a72
|
lib/ansiblelint/rules/TaskHasNameRule.py
|
lib/ansiblelint/rules/TaskHasNameRule.py
|
# Copyright (c) 2016 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint import AnsibleLintRule
class TaskHasNameRule(AnsibleLintRule):
id = 'ANSIBLE0011'
shortdesc = 'All tasks should be named'
description = 'All tasks should have a distinct name for readability ' + \
'and for --start-at-task to work'
tags = ['readability']
def matchtask(self, file, task):
return task.get('name', '') == ''
|
# Copyright (c) 2016 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint import AnsibleLintRule
class TaskHasNameRule(AnsibleLintRule):
id = 'ANSIBLE0011'
shortdesc = 'All tasks should be named'
description = 'All tasks should have a distinct name for readability ' + \
'and for --start-at-task to work'
tags = ['readability']
_nameless_tasks = ['meta', 'debug']
def matchtask(self, file, task):
return task.get('name', '') == '' and \
task["action"]["__ansible_module__"] not in self._nameless_tasks
|
Allow meta and debug tasks to not be named
|
Allow meta and debug tasks to not be named
Fixes #176
|
Python
|
mit
|
MatrixCrawler/ansible-lint,dataxu/ansible-lint,willthames/ansible-lint
|
5781f0a7792c07a6d14b8a575e7eebba39c2ee28
|
tests/test_variable_registration.py
|
tests/test_variable_registration.py
|
import angr
import nose
def test_registration():
s = angr.SimState(arch='AMD64')
a1 = s.solver.BVS('a', 64, key=(1,), eternal=True)
a2 = s.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a1, a2)
b1 = s.solver.BVS('b', 64, key=(2,), eternal=False)
s1 = s.copy()
s2 = s.copy()
b2 = s1.solver.BVS('b', 64, key=(2,), eternal=False)
b3 = s2.solver.BVS('b', 64, key=(2,), eternal=False)
nose.tools.assert_is_not(b1, b2)
nose.tools.assert_is_not(b2, b3)
nose.tools.assert_is_not(b1, b3)
a3 = s1.solver.BVS('a', 64, key=(1,), eternal=True)
a4 = s2.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a2, a3)
nose.tools.assert_is(a3, a4)
nose.tools.assert_equal(len(list(s.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s2.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s.solver.get_variables(2))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(2))), 2)
nose.tools.assert_equal(len(list(s2.solver.get_variables(2))), 2)
nose.tools.assert_equal(list(s.solver.describe_variables(a1)), [(1,)])
nose.tools.assert_equal(list(s.solver.describe_variables(b1)), [(2, 1)])
nose.tools.assert_equal(sorted(list(s.solver.describe_variables(a1 + b1))), [(1,), (2, 1)])
|
Add test case for variable registration
|
Add test case for variable registration
|
Python
|
bsd-2-clause
|
f-prettyland/angr,schieb/angr,tyb0807/angr,iamahuman/angr,angr/angr,tyb0807/angr,f-prettyland/angr,f-prettyland/angr,iamahuman/angr,schieb/angr,tyb0807/angr,angr/angr,angr/angr,iamahuman/angr,schieb/angr
|
Add test case for variable registration
|
import angr
import nose
def test_registration():
s = angr.SimState(arch='AMD64')
a1 = s.solver.BVS('a', 64, key=(1,), eternal=True)
a2 = s.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a1, a2)
b1 = s.solver.BVS('b', 64, key=(2,), eternal=False)
s1 = s.copy()
s2 = s.copy()
b2 = s1.solver.BVS('b', 64, key=(2,), eternal=False)
b3 = s2.solver.BVS('b', 64, key=(2,), eternal=False)
nose.tools.assert_is_not(b1, b2)
nose.tools.assert_is_not(b2, b3)
nose.tools.assert_is_not(b1, b3)
a3 = s1.solver.BVS('a', 64, key=(1,), eternal=True)
a4 = s2.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a2, a3)
nose.tools.assert_is(a3, a4)
nose.tools.assert_equal(len(list(s.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s2.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s.solver.get_variables(2))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(2))), 2)
nose.tools.assert_equal(len(list(s2.solver.get_variables(2))), 2)
nose.tools.assert_equal(list(s.solver.describe_variables(a1)), [(1,)])
nose.tools.assert_equal(list(s.solver.describe_variables(b1)), [(2, 1)])
nose.tools.assert_equal(sorted(list(s.solver.describe_variables(a1 + b1))), [(1,), (2, 1)])
|
<commit_before><commit_msg>Add test case for variable registration<commit_after>
|
import angr
import nose
def test_registration():
s = angr.SimState(arch='AMD64')
a1 = s.solver.BVS('a', 64, key=(1,), eternal=True)
a2 = s.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a1, a2)
b1 = s.solver.BVS('b', 64, key=(2,), eternal=False)
s1 = s.copy()
s2 = s.copy()
b2 = s1.solver.BVS('b', 64, key=(2,), eternal=False)
b3 = s2.solver.BVS('b', 64, key=(2,), eternal=False)
nose.tools.assert_is_not(b1, b2)
nose.tools.assert_is_not(b2, b3)
nose.tools.assert_is_not(b1, b3)
a3 = s1.solver.BVS('a', 64, key=(1,), eternal=True)
a4 = s2.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a2, a3)
nose.tools.assert_is(a3, a4)
nose.tools.assert_equal(len(list(s.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s2.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s.solver.get_variables(2))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(2))), 2)
nose.tools.assert_equal(len(list(s2.solver.get_variables(2))), 2)
nose.tools.assert_equal(list(s.solver.describe_variables(a1)), [(1,)])
nose.tools.assert_equal(list(s.solver.describe_variables(b1)), [(2, 1)])
nose.tools.assert_equal(sorted(list(s.solver.describe_variables(a1 + b1))), [(1,), (2, 1)])
|
Add test case for variable registrationimport angr
import nose
def test_registration():
s = angr.SimState(arch='AMD64')
a1 = s.solver.BVS('a', 64, key=(1,), eternal=True)
a2 = s.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a1, a2)
b1 = s.solver.BVS('b', 64, key=(2,), eternal=False)
s1 = s.copy()
s2 = s.copy()
b2 = s1.solver.BVS('b', 64, key=(2,), eternal=False)
b3 = s2.solver.BVS('b', 64, key=(2,), eternal=False)
nose.tools.assert_is_not(b1, b2)
nose.tools.assert_is_not(b2, b3)
nose.tools.assert_is_not(b1, b3)
a3 = s1.solver.BVS('a', 64, key=(1,), eternal=True)
a4 = s2.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a2, a3)
nose.tools.assert_is(a3, a4)
nose.tools.assert_equal(len(list(s.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s2.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s.solver.get_variables(2))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(2))), 2)
nose.tools.assert_equal(len(list(s2.solver.get_variables(2))), 2)
nose.tools.assert_equal(list(s.solver.describe_variables(a1)), [(1,)])
nose.tools.assert_equal(list(s.solver.describe_variables(b1)), [(2, 1)])
nose.tools.assert_equal(sorted(list(s.solver.describe_variables(a1 + b1))), [(1,), (2, 1)])
|
<commit_before><commit_msg>Add test case for variable registration<commit_after>import angr
import nose
def test_registration():
s = angr.SimState(arch='AMD64')
a1 = s.solver.BVS('a', 64, key=(1,), eternal=True)
a2 = s.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a1, a2)
b1 = s.solver.BVS('b', 64, key=(2,), eternal=False)
s1 = s.copy()
s2 = s.copy()
b2 = s1.solver.BVS('b', 64, key=(2,), eternal=False)
b3 = s2.solver.BVS('b', 64, key=(2,), eternal=False)
nose.tools.assert_is_not(b1, b2)
nose.tools.assert_is_not(b2, b3)
nose.tools.assert_is_not(b1, b3)
a3 = s1.solver.BVS('a', 64, key=(1,), eternal=True)
a4 = s2.solver.BVS('a', 64, key=(1,), eternal=True)
nose.tools.assert_is(a2, a3)
nose.tools.assert_is(a3, a4)
nose.tools.assert_equal(len(list(s.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s2.solver.get_variables(1))), 1)
nose.tools.assert_equal(len(list(s.solver.get_variables(2))), 1)
nose.tools.assert_equal(len(list(s1.solver.get_variables(2))), 2)
nose.tools.assert_equal(len(list(s2.solver.get_variables(2))), 2)
nose.tools.assert_equal(list(s.solver.describe_variables(a1)), [(1,)])
nose.tools.assert_equal(list(s.solver.describe_variables(b1)), [(2, 1)])
nose.tools.assert_equal(sorted(list(s.solver.describe_variables(a1 + b1))), [(1,), (2, 1)])
|
|
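The eternal=True behaviour the test pins down amounts to a per-key cache that always returns the identical object, while non-eternal variables are minted fresh per state; a minimal illustration of that caching idea, not angr's actual implementation.

_eternal_cache = {}

def eternal_var(key, make):
    # create the object once per key, then always hand back the same one
    if key not in _eternal_cache:
        _eternal_cache[key] = make()
    return _eternal_cache[key]

a1 = eternal_var((1,), object)
a2 = eternal_var((1,), object)
assert a1 is a2  # same identity, as the test asserts for eternal BVS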
00754100d3d9c962a3b106ebd296590ad5ccdaea
|
tests/test_errors.py
|
tests/test_errors.py
|
import logging
import json
import inspect
import pytest
from mappyfile.parser import Parser
from mappyfile.pprint import PrettyPrinter
from mappyfile.transformer import MapfileToDict
from lark.common import UnexpectedToken # inherits from ParseError
# from lark.lexer import UnexpectedInput
def output(s):
"""
Parse, transform, and pretty print
the result
"""
p = Parser()
m = MapfileToDict()
# https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
logging.info(inspect.stack()[1][3])
ast = p.parse(s)
logging.debug(ast)
d = m.transform(ast)
logging.debug(json.dumps(d, indent=4))
pp = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
s = pp.pprint(d)
logging.debug(s)
return s
def test_invalid_keyword():
"""
Check an invalid keyword throws a schema validation
error
"""
s = """
MAP
INVALID "setting"
END
"""
output(s)
def test_extra_end():
"""
Check an extra end keyword throws an error
"""
s = """MAP
NAME "test"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 4)
assert(ex.column == 1)
assert(str(ex.token) == 'END')
def test_missing_end():
"""
Check a missing END keyword throws a parse
error
"""
s = """MAP
LAYER
NAME "Test"
LAYER
NAME "Test2"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 7)
assert(ex.column == 4)
assert(str(ex.token) == 'END')
def run_tests():
"""
Need to comment out the following line in C:\VirtualEnvs\mappyfile\Lib\site-packages\pep8.py
#stdin_get_value = sys.stdin.read
Or get AttributeError: '_ReplInput' object has no attribute 'read'
"""
# pytest.main(["tests/test_snippets.py::test_style_pattern"])
pytest.main(["tests/test_errors.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
test_missing_end()
# run_tests()
print("Done!")
|
Add tests for expected errors
|
Add tests for expected errors
|
Python
|
mit
|
geographika/mappyfile,geographika/mappyfile
|
Add tests for expected errors
|
import logging
import json
import inspect
import pytest
from mappyfile.parser import Parser
from mappyfile.pprint import PrettyPrinter
from mappyfile.transformer import MapfileToDict
from lark.common import UnexpectedToken # inherits from ParseError
# from lark.lexer import UnexpectedInput
def output(s):
"""
Parse, transform, and pretty print
the result
"""
p = Parser()
m = MapfileToDict()
# https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
logging.info(inspect.stack()[1][3])
ast = p.parse(s)
logging.debug(ast)
d = m.transform(ast)
logging.debug(json.dumps(d, indent=4))
pp = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
s = pp.pprint(d)
logging.debug(s)
return s
def test_invalid_keyword():
"""
Check an invalid keyword throws a schema validation
error
"""
s = """
MAP
INVALID "setting"
END
"""
output(s)
def test_extra_end():
"""
Check an extra end keyword throws an error
"""
s = """MAP
NAME "test"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 4)
assert(ex.column == 1)
assert(str(ex.token) == 'END')
def test_missing_end():
"""
Check a missing END keyword throws a parse
error
"""
s = """MAP
LAYER
NAME "Test"
LAYER
NAME "Test2"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 7)
assert(ex.column == 4)
assert(str(ex.token) == 'END')
def run_tests():
"""
Need to comment out the following line in C:\VirtualEnvs\mappyfile\Lib\site-packages\pep8.py
#stdin_get_value = sys.stdin.read
Or get AttributeError: '_ReplInput' object has no attribute 'read'
"""
# pytest.main(["tests/test_snippets.py::test_style_pattern"])
pytest.main(["tests/test_errors.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
test_missing_end()
# run_tests()
print("Done!")
|
<commit_before><commit_msg>Add tests for expected errors<commit_after>
|
import logging
import json
import inspect
import pytest
from mappyfile.parser import Parser
from mappyfile.pprint import PrettyPrinter
from mappyfile.transformer import MapfileToDict
from lark.common import UnexpectedToken # inherits from ParseError
# from lark.lexer import UnexpectedInput
def output(s):
"""
Parse, transform, and pretty print
the result
"""
p = Parser()
m = MapfileToDict()
# https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
logging.info(inspect.stack()[1][3])
ast = p.parse(s)
logging.debug(ast)
d = m.transform(ast)
logging.debug(json.dumps(d, indent=4))
pp = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
s = pp.pprint(d)
logging.debug(s)
return s
def test_invalid_keyword():
"""
Check an invalid keyword throws a schema validation
error
"""
s = """
MAP
INVALID "setting"
END
"""
output(s)
def test_extra_end():
"""
Check an extra end keyword throws an error
"""
s = """MAP
NAME "test"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 4)
assert(ex.column == 1)
assert(str(ex.token) == 'END')
def test_missing_end():
"""
Check a missing END keyword throws a parse
error
"""
s = """MAP
LAYER
NAME "Test"
LAYER
NAME "Test2"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 7)
assert(ex.column == 4)
assert(str(ex.token) == 'END')
def run_tests():
"""
Need to comment out the following line in C:\VirtualEnvs\mappyfile\Lib\site-packages\pep8.py
#stdin_get_value = sys.stdin.read
Or get AttributeError: '_ReplInput' object has no attribute 'read'
"""
# pytest.main(["tests/test_snippets.py::test_style_pattern"])
pytest.main(["tests/test_errors.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
test_missing_end()
# run_tests()
print("Done!")
|
Add tests for expected errorsimport logging
import json
import inspect
import pytest
from mappyfile.parser import Parser
from mappyfile.pprint import PrettyPrinter
from mappyfile.transformer import MapfileToDict
from lark.common import UnexpectedToken # inherits from ParseError
# from lark.lexer import UnexpectedInput
def output(s):
"""
Parse, transform, and pretty print
the result
"""
p = Parser()
m = MapfileToDict()
# https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
logging.info(inspect.stack()[1][3])
ast = p.parse(s)
logging.debug(ast)
d = m.transform(ast)
logging.debug(json.dumps(d, indent=4))
pp = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
s = pp.pprint(d)
logging.debug(s)
return s
def test_invalid_keyword():
"""
Check an invalid keyword throws a schema validation
error
"""
s = """
MAP
INVALID "setting"
END
"""
output(s)
def test_extra_end():
"""
Check an extra end keyword throws an error
"""
s = """MAP
NAME "test"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 4)
assert(ex.column == 1)
assert(str(ex.token) == 'END')
def test_missing_end():
"""
Check a missing END keyword throws a parse
error
"""
s = """MAP
LAYER
NAME "Test"
LAYER
NAME "Test2"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 7)
assert(ex.column == 4)
assert(str(ex.token) == 'END')
def run_tests():
"""
Need to comment out the following line in C:\VirtualEnvs\mappyfile\Lib\site-packages\pep8.py
#stdin_get_value = sys.stdin.read
Or get AttributeError: '_ReplInput' object has no attribute 'read'
"""
# pytest.main(["tests/test_snippets.py::test_style_pattern"])
pytest.main(["tests/test_errors.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
test_missing_end()
# run_tests()
print("Done!")
|
<commit_before><commit_msg>Add tests for expected errors<commit_after>import logging
import json
import inspect
import pytest
from mappyfile.parser import Parser
from mappyfile.pprint import PrettyPrinter
from mappyfile.transformer import MapfileToDict
from lark.common import UnexpectedToken # inherits from ParseError
# from lark.lexer import UnexpectedInput
def output(s):
"""
Parse, transform, and pretty print
the result
"""
p = Parser()
m = MapfileToDict()
# https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
logging.info(inspect.stack()[1][3])
ast = p.parse(s)
logging.debug(ast)
d = m.transform(ast)
logging.debug(json.dumps(d, indent=4))
pp = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
s = pp.pprint(d)
logging.debug(s)
return s
def test_invalid_keyword():
"""
Check an invalid keyword throws a schema validation
error
"""
s = """
MAP
INVALID "setting"
END
"""
output(s)
def test_extra_end():
"""
Check an extra end keyword throws an error
"""
s = """MAP
NAME "test"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 4)
assert(ex.column == 1)
assert(str(ex.token) == 'END')
def test_missing_end():
"""
Check a missing END keyword throws a parse
error
"""
s = """MAP
LAYER
NAME "Test"
LAYER
NAME "Test2"
END
END"""
p = Parser()
try:
p.parse(s)
except UnexpectedToken as ex:
print(ex.__dict__)
assert(ex.line == 7)
assert(ex.column == 4)
assert(str(ex.token) == 'END')
def run_tests():
"""
Need to comment out the following line in C:\VirtualEnvs\mappyfile\Lib\site-packages\pep8.py
#stdin_get_value = sys.stdin.read
Or get AttributeError: '_ReplInput' object has no attribute 'read'
"""
# pytest.main(["tests/test_snippets.py::test_style_pattern"])
pytest.main(["tests/test_errors.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
test_missing_end()
# run_tests()
print("Done!")
|
|
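Note that the try/except pattern above passes silently when no exception is raised; a hedged variant with pytest.raises closes that gap. It assumes the same Parser and UnexpectedToken imports as the test module.

import pytest
from mappyfile.parser import Parser
from lark.common import UnexpectedToken

def test_extra_end_raises():
    with pytest.raises(UnexpectedToken) as excinfo:
        Parser().parse('MAP\n    NAME "test"\nEND\nEND')
    assert excinfo.value.line == 4  # the stray END on the last line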
78e5463fbf9cfd87e7b9b5e31741637d8f203219
|
targets.py
|
targets.py
|
"""
Functions for managing targets.
"""
import json
import logging
import os
import glob
import services
from services import *
class Target(object):
def __init__(self, json_desc):
config = json.loads(json_desc)
self.name = config['name']
self.host = config['host']
self.services = []
for service in config['services']:
name = service['service']
plugin = services.Service.get_plugin(name)
if plugin is None:
logging.warning("Failed to load service %s from %s target: Service not found" % (name, self.name))
continue
optional = service.get('optional', False)
value = service['value']
# parse the custom config for the plugin
if name in config:
service_config = config[name]
service_config['host'] = self.host
else:
service_config = { 'host': self.host }
logging.info("Plugin %s: loaded service %s" % (self.name, name))
self.services.append( {'plugin': plugin, 'optional': optional, 'value': value, 'config': service_config } )
def load_targets(dir = './targets'):
"""
Loads all available targets from the targets/ directory
"""
target_descs = [ os.path.basename(f) for f in glob.glob(dir+"/*.json") ]
targets = []
for desc in target_descs:
json = open('%s/%s' % (dir, desc), 'r').read()
targets.append(Target(json))
return targets
|
Add module that parses and loads target descriptions.
|
Add module that parses and loads target descriptions.
|
Python
|
bsd-3-clause
|
vtcsec/wargame-scorer
|
Add module that parses and loads target descriptions.
|
"""
Functions for managing targets.
"""
import json
import logging
import os
import glob
import services
from services import *
class Target(object):
def __init__(self, json_desc):
config = json.loads(json_desc)
self.name = config['name']
self.host = config['host']
self.services = []
for service in config['services']:
name = service['service']
plugin = services.Service.get_plugin(name)
if plugin is None:
logging.warning("Failed to load service %s from %s target: Service not found" % (name, self.name))
continue
optional = service.get('optional', False)
value = service['value']
# parse the custom config for the plugin
if name in config:
service_config = config[name]
service_config['host'] = self.host
else:
service_config = { 'host': self.host }
logging.info("Plugin %s: loaded service %s" % (self.name, name))
self.services.append( {'plugin': plugin, 'optional': optional, 'value': value, 'config': service_config } )
def load_targets(dir = './targets'):
"""
Loads all available targets from the targets/ directory
"""
target_descs = [ os.path.basename(f) for f in glob.glob(dir+"/*.json") ]
targets = []
for desc in target_descs:
json = open('%s/%s' % (dir, desc), 'r').read()
targets.append(Target(json))
return targets
|
<commit_before><commit_msg>Add module that parses and loads target descriptions.<commit_after>
|
"""
Functions for managing targets.
"""
import json
import logging
import os
import glob
import services
from services import *
class Target(object):
def __init__(self, json_desc):
config = json.loads(json_desc)
self.name = config['name']
self.host = config['host']
self.services = []
for service in config['services']:
name = service['service']
plugin = services.Service.get_plugin(name)
if plugin is None:
logging.warning("Failed to load service %s from %s target: Service not found" % (name, self.name))
continue
optional = service.get('optional', False)
value = service['value']
# parse the custom config for the plugin
if name in config:
service_config = config[name]
service_config['host'] = self.host
else:
service_config = { 'host': self.host }
logging.info("Plugin %s: loaded service %s" % (self.name, name))
self.services.append( {'plugin': plugin, 'optional': optional, 'value': value, 'config': service_config } )
def load_targets(dir = './targets'):
"""
Loads all available targets from the targets/ directory
"""
target_descs = [ os.path.basename(f) for f in glob.glob(dir+"/*.json") ]
targets = []
for desc in target_descs:
json = open('%s/%s' % (dir, desc), 'r').read()
targets.append(Target(json))
return targets
|
Add module that parses and loads target descriptions."""
Functions for managing targets.
"""
import json
import logging
import os
import glob
import services
from services import *
class Target(object):
def __init__(self, json_desc):
config = json.loads(json_desc)
self.name = config['name']
self.host = config['host']
self.services = []
for service in config['services']:
name = service['service']
plugin = services.Service.get_plugin(name)
if plugin is None:
logging.warning("Failed to load service %s from %s target: Service not found" % (name, self.name))
continue
optional = service.get('optional', False)
value = service['value']
# parse the custom config for the plugin
if name in config:
service_config = config[name]
service_config['host'] = self.host
else:
service_config = { 'host': self.host }
logging.info("Plugin %s: loaded service %s" % (self.name, name))
self.services.append( {'plugin': plugin, 'optional': optional, 'value': value, 'config': service_config } )
def load_targets(dir = './targets'):
"""
Loads all available targets from the targets/ directory
"""
target_descs = [ os.path.basename(f) for f in glob.glob(dir+"/*.json") ]
targets = []
for desc in target_descs:
json = open('%s/%s' % (dir, desc), 'r').read()
targets.append(Target(json))
return targets
|
<commit_before><commit_msg>Add module that parses and loads target descriptions.<commit_after>"""
Functions for managing targets.
"""
import json
import logging
import os
import glob
import services
from services import *
class Target(object):
def __init__(self, json_desc):
config = json.loads(json_desc)
self.name = config['name']
self.host = config['host']
self.services = []
for service in config['services']:
name = service['service']
plugin = services.Service.get_plugin(name)
if plugin is None:
logging.warning("Failed to load service %s from %s target: Service not found" % (name, self.name))
continue
optional = service.get('optional', False)
value = service['value']
# parse the custom config for the plugin
if name in config:
service_config = config[name]
service_config['host'] = self.host
else:
service_config = { 'host': self.host }
logging.info("Plugin %s: loaded service %s" % (self.name, name))
self.services.append( {'plugin': plugin, 'optional': optional, 'value': value, 'config': service_config } )
def load_targets(dir = './targets'):
"""
Loads all available targets from the targets/ directory
"""
target_descs = [ os.path.basename(f) for f in glob.glob(dir+"/*.json") ]
targets = []
for desc in target_descs:
json = open('%s/%s' % (dir, desc), 'r').read()
targets.append(Target(json))
return targets
|
|
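For reference, a target description of the shape Target.__init__ reads; the host, service names, and point values are made up, and the trailing "http" block shows the per-plugin config keyed by service name.

import json

example_desc = json.dumps({
    "name": "web01",
    "host": "10.0.0.5",
    "services": [
        {"service": "http", "value": 10},
        {"service": "ssh", "value": 5, "optional": True},
    ],
    "http": {"port": 8080},  # custom config merged with {'host': ...}
})
# Target(example_desc) would register both services, passing
# {'port': 8080, 'host': '10.0.0.5'} to the http plugin.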
8c362e8f253190ffb963ad99dcb3cc3594363fc5
|
tests/test_random.py
|
tests/test_random.py
|
import pycron
def test_parse_arg():
assert pycron._parse_arg('1/5', 0) is False
assert pycron._parse_arg('asd-dsa', 0) is False
def test_no_dt():
assert pycron.is_now('* * * * *')
|
Add a couple of random tests, just to achieve 100% test coverage \o/.
|
Add a couple of random tests, just to achieve 100% test coverage \o/.
|
Python
|
mit
|
kipe/pycron
|
Add a couple of random tests, just to achieve 100% test coverage \o/.
|
import pycron
def test_parse_arg():
assert pycron._parse_arg('1/5', 0) is False
assert pycron._parse_arg('asd-dsa', 0) is False
def test_no_dt():
assert pycron.is_now('* * * * *')
|
<commit_before><commit_msg>Add couple of random tests, just to achieve 100% test coverage \o/.<commit_after>
|
import pycron
def test_parse_arg():
assert pycron._parse_arg('1/5', 0) is False
assert pycron._parse_arg('asd-dsa', 0) is False
def test_no_dt():
assert pycron.is_now('* * * * *')
|
Add a couple of random tests, just to achieve 100% test coverage \o/.import pycron
def test_parse_arg():
assert pycron._parse_arg('1/5', 0) is False
assert pycron._parse_arg('asd-dsa', 0) is False
def test_no_dt():
assert pycron.is_now('* * * * *')
|
<commit_before><commit_msg>Add couple of random tests, just to achieve 100% test coverage \o/.<commit_after>import pycron
def test_parse_arg():
assert pycron._parse_arg('1/5', 0) is False
assert pycron._parse_arg('asd-dsa', 0) is False
def test_no_dt():
assert pycron.is_now('* * * * *')
|
|
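Beyond the wildcard case, pycron's is_now also takes an explicit datetime as its second argument; a small sketch of that documented usage.

import pycron
from datetime import datetime

dt = datetime(2021, 1, 4, 9, 15)          # a Monday at 09:15
assert pycron.is_now('15 9 * * 1', dt)    # minute hour dom month dow
assert not pycron.is_now('0 0 * * 0', dt)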
2644015a9e78dea4ebe152f4c8e30f2e3c62b3e0
|
tests/test_roster.py
|
tests/test_roster.py
|
#!/usr/bin/env python
import pytest
from datetime import datetime
from mlbgame import roster
team_id = 117
r = roster.Roster(team_id)
def test_roster_url():
roster_url = 'http://mlb.mlb.com/lookup/json/named.roster_40.bam?team_id=117'
assert r.roster_url == roster_url
def test_roster_is_list():
assert type(r.roster) is list
|
Add a couple simple pytest tests for roster class
|
Add a couple simple pytest tests for roster class
|
Python
|
mit
|
panzarino/mlbgame,zachpanz88/mlbgame
|
Add a couple simple pytest tests for roster class
|
#!/usr/bin/env python
import pytest
from datetime import datetime
from mlbgame import roster
team_id = 117
r = roster.Roster(team_id)
def test_roster_url():
roster_url = 'http://mlb.mlb.com/lookup/json/named.roster_40.bam?team_id=117'
assert r.roster_url == roster_url
def test_roster_is_list():
assert type(r.roster) is list
|
<commit_before><commit_msg>Add a couple simple pytest tests for roster class<commit_after>
|
#!/usr/bin/env python
import pytest
from datetime import datetime
from mlbgame import roster
team_id = 117
r = roster.Roster(team_id)
def test_roster_url():
roster_url = 'http://mlb.mlb.com/lookup/json/named.roster_40.bam?team_id=117'
assert r.roster_url == roster_url
def test_roster_is_list():
assert type(r.roster) is list
|
Add a couple simple pytest tests for roster class#!/usr/bin/env python
import pytest
from datetime import datetime
from mlbgame import roster
team_id = 117
r = roster.Roster(team_id)
def test_roster_url():
roster_url = 'http://mlb.mlb.com/lookup/json/named.roster_40.bam?team_id=117'
assert r.roster_url == roster_url
def test_roster_is_list():
assert type(r.roster) is list
|
<commit_before><commit_msg>Add a couple simple pytest tests for roster class<commit_after>#!/usr/bin/env python
import pytest
from datetime import datetime
from mlbgame import roster
team_id = 117
r = roster.Roster(team_id)
def test_roster_url():
roster_url = 'http://mlb.mlb.com/lookup/json/named.roster_40.bam?team_id=117'
assert r.roster_url == roster_url
def test_roster_is_list():
assert type(r.roster) is list
|
|
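One possible extension is parametrizing the URL check over several team ids; ids other than 117 are assumptions about MLB's numbering, so treat this as a sketch.

import pytest
from mlbgame import roster

@pytest.mark.parametrize('team_id', [117, 121, 147])
def test_roster_url_per_team(team_id):
    r = roster.Roster(team_id)
    assert r.roster_url.endswith('team_id=%d' % team_id)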
6c84b31b54b21bcfeddd6fca8c77f44eb1a30cf2
|
final/problem7.py
|
final/problem7.py
|
# Problem 7
# 20.0 points possible (graded)
# Write a function called general_poly.
def general_poly(L):
""" L, a list of numbers (n0, n1, n2, ... nk)
Returns a function, which when applied to a value x, returns the value
n0 * x^k + n1 * x^(k-1) + ... nk * x^0
"""
numList = L[::-1]
def apply(number):
value = 0
for i in range(len(numList)):
result = numList[i] * (number ** i)
value += result
return value
return apply
L = [1, 2, 3, 4]
print(general_poly([1, 2, 3, 4])(10))
|
Implement the general_poly function that returns a function which, applied to a number, returns a value
|
Implement the general_poly function that returns a function which, applied to a number, returns a value
|
Python
|
mit
|
Kunal57/MIT_6.00.1x
|
Implement the general_poly function that returns a function which, applied to a number, returns a value
|
# Problem 7
# 20.0 points possible (graded)
# Write a function called general_poly.
def general_poly(L):
""" L, a list of numbers (n0, n1, n2, ... nk)
Returns a function, which when applied to a value x, returns the value
n0 * x^k + n1 * x^(k-1) + ... nk * x^0
"""
numList = L[::-1]
def apply(number):
value = 0
for i in range(len(numList)):
result = numList[i] * (number ** i)
value += result
return value
return apply
L = [1, 2, 3, 4]
print(general_poly([1, 2, 3, 4])(10))
|
<commit_before><commit_msg>Implement the general_poly function that reurns a function, applies a number, and returns a value<commit_after>
|
# Problem 7
# 20.0 points possible (graded)
# Write a function called general_poly.
def general_poly(L):
""" L, a list of numbers (n0, n1, n2, ... nk)
Returns a function, which when applied to a value x, returns the value
n0 * x^k + n1 * x^(k-1) + ... nk * x^0
"""
numList = L[::-1]
def apply(number):
value = 0
for i in range(len(numList)):
result = numList[i] * (number ** i)
value += result
return value
return apply
L = [1, 2, 3, 4]
print(general_poly([1, 2, 3, 4])(10))
|
Implement the general_poly function that returns a function which, applied to a number, returns a value# Problem 7
# 20.0 points possible (graded)
# Write a function called general_poly.
def general_poly(L):
""" L, a list of numbers (n0, n1, n2, ... nk)
Returns a function, which when applied to a value x, returns the value
n0 * x^k + n1 * x^(k-1) + ... nk * x^0
"""
numList = L[::-1]
def apply(number):
value = 0
for i in range(len(numList)):
result = numList[i] * (number ** i)
value += result
return value
return apply
L = [1, 2, 3, 4]
print(general_poly([1, 2, 3, 4])(10))
|
<commit_before><commit_msg>Implement the general_poly function that reurns a function, applies a number, and returns a value<commit_after># Problem 7
# 20.0 points possible (graded)
# Write a function called general_poly.
def general_poly(L):
""" L, a list of numbers (n0, n1, n2, ... nk)
Returns a function, which when applied to a value x, returns the value
n0 * x^k + n1 * x^(k-1) + ... nk * x^0
"""
numList = L[::-1]
def apply(number):
value = 0
for i in range(len(numList)):
result = numList[i] * (number ** i)
value += result
return value
return apply
L = [1, 2, 3, 4]
print(general_poly([1, 2, 3, 4])(10))
|
|
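An equivalent formulation via Horner's rule avoids both the list reversal and the per-term exponentiation; shown alongside as a sketch, not as the graded answer.

def general_poly_horner(L):
    def apply(x):
        value = 0
        for coeff in L:        # n0 first, matching the docstring order
            value = value * x + coeff
        return value
    return apply

assert general_poly_horner([1, 2, 3, 4])(10) == 1234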
c99eeda83b9623e88a694a27a6aa83d0ecaecf0f
|
website/jdpages/migrations/0011_headerimagevalidator.py
|
website/jdpages/migrations/0011_headerimagevalidator.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import mezzanine.core.fields
import website.jdpages.models
class Migration(migrations.Migration):
dependencies = [
('jdpages', '0010_pageheaderimagewidget_pageheadersettingswidget'),
]
operations = [
migrations.AlterField(
model_name='pageheaderimagewidget',
name='image',
field=mezzanine.core.fields.FileField(max_length=200, validators=[website.jdpages.models.validate_header_image]),
preserve_default=True,
),
]
|
Add missing migration for header image validator
|
Add missing migration for header image validator
|
Python
|
mit
|
jonge-democraten/website,jonge-democraten/website,jonge-democraten/website,jonge-democraten/website
|
Add missing migration for header image validator
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import mezzanine.core.fields
import website.jdpages.models
class Migration(migrations.Migration):
dependencies = [
('jdpages', '0010_pageheaderimagewidget_pageheadersettingswidget'),
]
operations = [
migrations.AlterField(
model_name='pageheaderimagewidget',
name='image',
field=mezzanine.core.fields.FileField(max_length=200, validators=[website.jdpages.models.validate_header_image]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration for header image validator<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import mezzanine.core.fields
import website.jdpages.models
class Migration(migrations.Migration):
dependencies = [
('jdpages', '0010_pageheaderimagewidget_pageheadersettingswidget'),
]
operations = [
migrations.AlterField(
model_name='pageheaderimagewidget',
name='image',
field=mezzanine.core.fields.FileField(max_length=200, validators=[website.jdpages.models.validate_header_image]),
preserve_default=True,
),
]
|
Add missing migration for header image validator# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import mezzanine.core.fields
import website.jdpages.models
class Migration(migrations.Migration):
dependencies = [
('jdpages', '0010_pageheaderimagewidget_pageheadersettingswidget'),
]
operations = [
migrations.AlterField(
model_name='pageheaderimagewidget',
name='image',
field=mezzanine.core.fields.FileField(max_length=200, validators=[website.jdpages.models.validate_header_image]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration for header image validator<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import mezzanine.core.fields
import website.jdpages.models
class Migration(migrations.Migration):
dependencies = [
('jdpages', '0010_pageheaderimagewidget_pageheadersettingswidget'),
]
operations = [
migrations.AlterField(
model_name='pageheaderimagewidget',
name='image',
field=mezzanine.core.fields.FileField(max_length=200, validators=[website.jdpages.models.validate_header_image]),
preserve_default=True,
),
]
|
|
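The migration only records the validator on the field; the function itself lives in website.jdpages.models. A hypothetical validator of that general kind, purely for illustration; the real validate_header_image may well check image dimensions rather than extensions.

from django.core.exceptions import ValidationError

def validate_header_image(value):
    # reject anything that is not a common web image format (assumed rule)
    if not str(value).lower().endswith(('.jpg', '.jpeg', '.png')):
        raise ValidationError('Header image must be a JPEG or PNG file.')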
da3f2b5f4d6993130299bd54bf8f56e0004861b9
|
doc/examples/plot_label.py
|
doc/examples/plot_label.py
|
"""
===================
Label image regions
===================
This example shows how to segment an image with image labelling.
"""
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from skimage import data
from skimage.filter import threshold_otsu
from skimage.segmentation import clear_border
from skimage.morphology import label
from skimage.measure import regionprops
image = data.coins()[50:-50, 50:-50]
# apply threshold
thresh = threshold_otsu(image)
bw = image > thresh
fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(6, 6))
plt.gray()
ax.imshow(bw)
# remove artifacts connected to image border
cleared = bw.copy()
clear_border(cleared)
# label image regions
label_image = label(cleared)
for region in regionprops(label_image, ['Area', 'BoundingBox']):
# skip small images
if region['Area'] < 100:
continue
# draw rectangle around segmented coins
minr, minc, maxr, maxc = region['BoundingBox']
rect = mpatches.Rectangle((minc, minr), maxc - minc, maxr - minr,
fill=False, edgecolor='red', linewidth=2)
ax.add_patch(rect)
plt.show()
|
Add example script for image labelling
|
Add example script for image labelling
|
Python
|
bsd-3-clause
|
GaZ3ll3/scikit-image,SamHames/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,SamHames/scikit-image,paalge/scikit-image,paalge/scikit-image,emon10005/scikit-image,paalge/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,oew1v07/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,emon10005/scikit-image,youprofit/scikit-image,vighneshbirodkar/scikit-image,oew1v07/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,WarrenWeckesser/scikits-image,bennlich/scikit-image,almarklein/scikit-image,warmspringwinds/scikit-image,rjeli/scikit-image,bennlich/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,dpshelio/scikit-image,Britefury/scikit-image,ClinicalGraphics/scikit-image,Hiyorimi/scikit-image,michaelpacer/scikit-image,chriscrosscutler/scikit-image,blink1073/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,robintw/scikit-image,newville/scikit-image,chintak/scikit-image,bsipocz/scikit-image,rjeli/scikit-image,newville/scikit-image,almarklein/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,keflavich/scikit-image,bsipocz/scikit-image,juliusbierk/scikit-image,jwiggins/scikit-image,dpshelio/scikit-image,juliusbierk/scikit-image,Midafi/scikit-image,rjeli/scikit-image,Midafi/scikit-image,SamHames/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,jwiggins/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,ofgulban/scikit-image
|
Add example script for image labelling
|
"""
===================
Label image regions
===================
This example shows how to segment an image with image labelling.
"""
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from skimage import data
from skimage.filter import threshold_otsu
from skimage.segmentation import clear_border
from skimage.morphology import label
from skimage.measure import regionprops
image = data.coins()[50:-50, 50:-50]
# apply threshold
thresh = threshold_otsu(image)
bw = image > thresh
fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(6, 6))
plt.gray()
ax.imshow(bw)
# remove artifacts connected to image border
cleared = bw.copy()
clear_border(cleared)
# label image regions
label_image = label(cleared)
for region in regionprops(label_image, ['Area', 'BoundingBox']):
# skip small images
if region['Area'] < 100:
continue
# draw rectangle around segmented coins
minr, minc, maxr, maxc = region['BoundingBox']
rect = mpatches.Rectangle((minc, minr), maxc - minc, maxr - minr,
fill=False, edgecolor='red', linewidth=2)
ax.add_patch(rect)
plt.show()
|
<commit_before><commit_msg>Add example script for image labelling<commit_after>
|
"""
===================
Label image regions
===================
This example shows how to segment an image with image labelling.
"""
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from skimage import data
from skimage.filter import threshold_otsu
from skimage.segmentation import clear_border
from skimage.morphology import label
from skimage.measure import regionprops
image = data.coins()[50:-50, 50:-50]
# apply threshold
thresh = threshold_otsu(image)
bw = image > thresh
fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(6, 6))
plt.gray()
ax.imshow(bw)
# remove artifacts connected to image border
cleared = bw.copy()
clear_border(cleared)
# label image regions
label_image = label(cleared)
for region in regionprops(label_image, ['Area', 'BoundingBox']):
# skip small images
if region['Area'] < 100:
continue
# draw rectangle around segmented coins
minr, minc, maxr, maxc = region['BoundingBox']
rect = mpatches.Rectangle((minc, minr), maxc - minc, maxr - minr,
fill=False, edgecolor='red', linewidth=2)
ax.add_patch(rect)
plt.show()
|
Add example script for image labelling"""
===================
Label image regions
===================
This example shows how to segment an image with image labelling.
"""
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from skimage import data
from skimage.filter import threshold_otsu
from skimage.segmentation import clear_border
from skimage.morphology import label
from skimage.measure import regionprops
image = data.coins()[50:-50, 50:-50]
# apply threshold
thresh = threshold_otsu(image)
bw = image > thresh
fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(6, 6))
plt.gray()
ax.imshow(bw)
# remove artifacts connected to image border
cleared = bw.copy()
clear_border(cleared)
# label image regions
label_image = label(cleared)
for region in regionprops(label_image, ['Area', 'BoundingBox']):
# skip small images
if region['Area'] < 100:
continue
# draw rectangle around segmented coins
minr, minc, maxr, maxc = region['BoundingBox']
rect = mpatches.Rectangle((minc, minr), maxc - minc, maxr - minr,
fill=False, edgecolor='red', linewidth=2)
ax.add_patch(rect)
plt.show()
|
<commit_before><commit_msg>Add example script for image labelling<commit_after>"""
===================
Label image regions
===================
This example shows how to segment an image with image labelling.
"""
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from skimage import data
from skimage.filter import threshold_otsu
from skimage.segmentation import clear_border
from skimage.morphology import label
from skimage.measure import regionprops
image = data.coins()[50:-50, 50:-50]
# apply threshold
thresh = threshold_otsu(image)
bw = image > thresh
fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(6, 6))
plt.gray()
ax.imshow(bw)
# remove artifacts connected to image border
cleared = bw.copy()
clear_border(cleared)
# label image regions
label_image = label(cleared)
for region in regionprops(label_image, ['Area', 'BoundingBox']):
# skip small images
if region['Area'] < 100:
continue
# draw rectangle around segmented coins
minr, minc, maxr, maxc = region['BoundingBox']
rect = mpatches.Rectangle((minc, minr), maxc - minc, maxr - minr,
fill=False, edgecolor='red', linewidth=2)
ax.add_patch(rect)
plt.show()
|
|
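The example targets an older scikit-image; on current releases skimage.filter became skimage.filters, label moved to skimage.measure, and regionprops exposes attributes instead of taking a property list. A sketch of the same pipeline under those assumptions.

from skimage import data
from skimage.filters import threshold_otsu
from skimage.measure import label, regionprops

image = data.coins()[50:-50, 50:-50]
bw = image > threshold_otsu(image)
for region in regionprops(label(bw)):
    if region.area >= 100:
        print(region.bbox)   # (min_row, min_col, max_row, max_col)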
15cdcfe30e8599ff2968a0848169e48ad1ade831
|
fetch_configs/syzygy.py
|
fetch_configs/syzygy.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add a Syzygy fetch config.
|
Add a Syzygy fetch config.
Review URL: https://codereview.chromium.org/1749213002
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@299057 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
CoherentLabs/depot_tools,CoherentLabs/depot_tools,primiano/depot_tools,primiano/depot_tools,primiano/depot_tools
|
Add a Syzygy fetch config.
Review URL: https://codereview.chromium.org/1749213002
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@299057 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add a Syzygy fetch config.
Review URL: https://codereview.chromium.org/1749213002
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@299057 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add a Syzygy fetch config.
Review URL: https://codereview.chromium.org/1749213002
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@299057 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add a Syzygy fetch config.
Review URL: https://codereview.chromium.org/1749213002
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@299057 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
|
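A quick sanity check of the spec shape; config_util only resolves inside a depot_tools checkout, so this is illustrative rather than a portable test. End users would simply run `fetch syzygy`.

spec = Syzygy.fetch_spec({})
assert spec['type'] == 'gclient_git'
solution = spec['gclient_git_spec']['solutions'][0]
assert solution['name'] == 'src' and solution['url'].endswith('syzygy.git')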
b8f101cb91a24879591bc97531d14764de86760b
|
jenkins/list_jobs.py
|
jenkins/list_jobs.py
|
from __future__ import print_function
import glob
import os
import re
import json
JOBS_PATH = './'
os.chdir(JOBS_PATH)
jobs = {
'job': [],
'pipelineJob': []
}
for job_file in glob.glob("*.groovy"):
with open(os.path.join(JOBS_PATH, job_file)) as f:
data = f.read()
for j in jobs.keys():
o = re.search('%s\(\'(.+?)\'\)' % j, data)
if o:
jobs[j].append(o.group(1))
break
print(json.dumps(jobs, indent=4))
|
Add script to list all jenkins jobs
|
Add script to list all jenkins jobs
|
Python
|
mit
|
madcore-ai/core,madcore-ai/core
|
Add script to list all jenkins jobs
|
from __future__ import print_function
import glob
import os
import re
import json
JOBS_PATH = './'
os.chdir(JOBS_PATH)
jobs = {
'job': [],
'pipelineJob': []
}
for job_file in glob.glob("*.groovy"):
with open(os.path.join(JOBS_PATH, job_file)) as f:
data = f.read()
for j in jobs.keys():
o = re.search('%s\(\'(.+?)\'\)' % j, data)
if o:
jobs[j].append(o.group(1))
break
print(json.dumps(jobs, indent=4))
|
<commit_before><commit_msg>Add script to list all jenkins jobs<commit_after>
|
from __future__ import print_function
import glob
import os
import re
import json
JOBS_PATH = './'
os.chdir(JOBS_PATH)
jobs = {
'job': [],
'pipelineJob': []
}
for job_file in glob.glob("*.groovy"):
with open(os.path.join(JOBS_PATH, job_file)) as f:
data = f.read()
for j in jobs.keys():
o = re.search('%s\(\'(.+?)\'\)' % j, data)
if o:
jobs[j].append(o.group(1))
break
print(json.dumps(jobs, indent=4))
|
Add script to list all jenkins jobsfrom __future__ import print_function
import glob
import os
import re
import json
JOBS_PATH = './'
os.chdir(JOBS_PATH)
jobs = {
'job': [],
'pipelineJob': []
}
for job_file in glob.glob("*.groovy"):
with open(os.path.join(JOBS_PATH, job_file)) as f:
data = f.read()
for j in jobs.keys():
o = re.search('%s\(\'(.+?)\'\)' % j, data)
if o:
jobs[j].append(o.group(1))
break
print(json.dumps(jobs, indent=4))
|
<commit_before><commit_msg>Add script to list all jenkins jobs<commit_after>from __future__ import print_function
import glob
import os
import re
import json
JOBS_PATH = './'
os.chdir(JOBS_PATH)
jobs = {
'job': [],
'pipelineJob': []
}
for job_file in glob.glob("*.groovy"):
with open(os.path.join(JOBS_PATH, job_file)) as f:
data = f.read()
for j in jobs.keys():
o = re.search('%s\(\'(.+?)\'\)' % j, data)
if o:
jobs[j].append(o.group(1))
break
print(json.dumps(jobs, indent=4))
|
|
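The same search applied to an inline sample shows what the capture group yields; the job name is invented. A raw-string pattern also avoids the invalid-escape warnings the %s-built pattern can trigger on newer Pythons.

import re

sample = "pipelineJob('deploy-prod') {\n    definition { }\n}"
match = re.search(r"pipelineJob\('(.+?)'\)", sample)
print(match.group(1))   # deploy-prod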
54ed840b15a264c23c069f780d1fd6b62912ebf6
|
Snippets/remove-overlaps.py
|
Snippets/remove-overlaps.py
|
#! /usr/bin/env python3
# Example script to remove overlaps in TTF using skia-pathops
import sys
from fontTools.ttLib import TTFont
from fontTools.pens.recordingPen import DecomposingRecordingPen
from fontTools.pens.ttGlyphPen import TTGlyphPen
try:
import pathops
except ImportError:
sys.exit(
"This script requires the skia-pathops module. "
"`pip install skia-pathops` and then retry."
)
def skpath_from_simple_glyph(glyphName, glyphSet):
path = pathops.Path()
pathPen = path.getPen()
glyphSet[glyphName].draw(pathPen)
return path
def skpath_from_composite_glyph(glyphName, glyphSet):
# record TTGlyph outlines without components
dcPen = DecomposingRecordingPen(glyphSet)
glyphSet[glyphName].draw(dcPen)
# replay recording onto a skia-pathops Path
path = pathops.Path()
pathPen = path.getPen()
dcPen.replay(pathPen)
return path
def tt_glyph_from_skpath(path):
ttPen = TTGlyphPen(None)
path.draw(ttPen)
return ttPen.glyph()
def main():
if len(sys.argv) != 3:
print("usage: remove-overlaps.py fontfile.ttf outfile.ttf")
sys.exit(1)
src = sys.argv[1]
dst = sys.argv[2]
with TTFont(src) as f:
glyfTable = f["glyf"]
glyphSet = f.getGlyphSet()
for glyphName in glyphSet.keys():
if glyfTable[glyphName].isComposite():
path = skpath_from_composite_glyph(glyphName, glyphSet)
else:
path = skpath_from_simple_glyph(glyphName, glyphSet)
# duplicate path
path2 = pathops.Path(path)
# remove overlaps
path2.simplify()
# replace TTGlyph if simplified copy is different
if path2 != path:
glyfTable[glyphName] = tt_glyph_from_skpath(path2)
f.save(dst)
if __name__ == "__main__":
main()
|
Add snippet to remove overlaps on TTF with skia-pathops
|
Add snippet to remove overlaps on TTF with skia-pathops
|
Python
|
mit
|
fonttools/fonttools,googlefonts/fonttools
|
Add snippet to remove overlaps on TTF with skia-pathops
|
#! /usr/bin/env python3
# Example script to remove overlaps in TTF using skia-pathops
import sys
from fontTools.ttLib import TTFont
from fontTools.pens.recordingPen import DecomposingRecordingPen
from fontTools.pens.ttGlyphPen import TTGlyphPen
try:
import pathops
except ImportError:
sys.exit(
"This script requires the skia-pathops module. "
"`pip install skia-pathops` and then retry."
)
def skpath_from_simple_glyph(glyphName, glyphSet):
path = pathops.Path()
pathPen = path.getPen()
glyphSet[glyphName].draw(pathPen)
return path
def skpath_from_composite_glyph(glyphName, glyphSet):
# record TTGlyph outlines without components
dcPen = DecomposingRecordingPen(glyphSet)
glyphSet[glyphName].draw(dcPen)
# replay recording onto a skia-pathops Path
path = pathops.Path()
pathPen = path.getPen()
dcPen.replay(pathPen)
return path
def tt_glyph_from_skpath(path):
ttPen = TTGlyphPen(None)
path.draw(ttPen)
return ttPen.glyph()
def main():
if len(sys.argv) != 3:
print("usage: remove-overlaps.py fontfile.ttf outfile.ttf")
sys.exit(1)
src = sys.argv[1]
dst = sys.argv[2]
with TTFont(src) as f:
glyfTable = f["glyf"]
glyphSet = f.getGlyphSet()
for glyphName in glyphSet.keys():
if glyfTable[glyphName].isComposite():
path = skpath_from_composite_glyph(glyphName, glyphSet)
else:
path = skpath_from_simple_glyph(glyphName, glyphSet)
# duplicate path
path2 = pathops.Path(path)
# remove overlaps
path2.simplify()
# replace TTGlyph if simplified copy is different
if path2 != path:
glyfTable[glyphName] = tt_glyph_from_skpath(path2)
f.save(dst)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add snippet to remove overlaps on TTF with skia-pathops<commit_after>
|
#! /usr/bin/env python3
# Example script to remove overlaps in TTF using skia-pathops
import sys
from fontTools.ttLib import TTFont
from fontTools.pens.recordingPen import DecomposingRecordingPen
from fontTools.pens.ttGlyphPen import TTGlyphPen
try:
import pathops
except ImportError:
sys.exit(
"This script requires the skia-pathops module. "
"`pip install skia-pathops` and then retry."
)
def skpath_from_simple_glyph(glyphName, glyphSet):
path = pathops.Path()
pathPen = path.getPen()
glyphSet[glyphName].draw(pathPen)
return path
def skpath_from_composite_glyph(glyphName, glyphSet):
# record TTGlyph outlines without components
dcPen = DecomposingRecordingPen(glyphSet)
glyphSet[glyphName].draw(dcPen)
# replay recording onto a skia-pathops Path
path = pathops.Path()
pathPen = path.getPen()
dcPen.replay(pathPen)
return path
def tt_glyph_from_skpath(path):
ttPen = TTGlyphPen(None)
path.draw(ttPen)
return ttPen.glyph()
def main():
if len(sys.argv) != 3:
print("usage: remove-overlaps.py fontfile.ttf outfile.ttf")
sys.exit(1)
src = sys.argv[1]
dst = sys.argv[2]
with TTFont(src) as f:
glyfTable = f["glyf"]
glyphSet = f.getGlyphSet()
for glyphName in glyphSet.keys():
if glyfTable[glyphName].isComposite():
path = skpath_from_composite_glyph(glyphName, glyphSet)
else:
path = skpath_from_simple_glyph(glyphName, glyphSet)
# duplicate path
path2 = pathops.Path(path)
# remove overlaps
path2.simplify()
# replace TTGlyph if simplified copy is different
if path2 != path:
glyfTable[glyphName] = tt_glyph_from_skpath(path2)
f.save(dst)
if __name__ == "__main__":
main()
|
Add snippet to remove overlaps on TTF with skia-pathops#! /usr/bin/env python3
# Example script to remove overlaps in TTF using skia-pathops
import sys
from fontTools.ttLib import TTFont
from fontTools.pens.recordingPen import DecomposingRecordingPen
from fontTools.pens.ttGlyphPen import TTGlyphPen
try:
import pathops
except ImportError:
sys.exit(
"This script requires the skia-pathops module. "
"`pip install skia-pathops` and then retry."
)
def skpath_from_simple_glyph(glyphName, glyphSet):
path = pathops.Path()
pathPen = path.getPen()
glyphSet[glyphName].draw(pathPen)
return path
def skpath_from_composite_glyph(glyphName, glyphSet):
# record TTGlyph outlines without components
dcPen = DecomposingRecordingPen(glyphSet)
glyphSet[glyphName].draw(dcPen)
# replay recording onto a skia-pathops Path
path = pathops.Path()
pathPen = path.getPen()
dcPen.replay(pathPen)
return path
def tt_glyph_from_skpath(path):
ttPen = TTGlyphPen(None)
path.draw(ttPen)
return ttPen.glyph()
def main():
if len(sys.argv) != 3:
print("usage: remove-overlaps.py fontfile.ttf outfile.ttf")
sys.exit(1)
src = sys.argv[1]
dst = sys.argv[2]
with TTFont(src) as f:
glyfTable = f["glyf"]
glyphSet = f.getGlyphSet()
for glyphName in glyphSet.keys():
if glyfTable[glyphName].isComposite():
path = skpath_from_composite_glyph(glyphName, glyphSet)
else:
path = skpath_from_simple_glyph(glyphName, glyphSet)
# duplicate path
path2 = pathops.Path(path)
# remove overlaps
path2.simplify()
# replace TTGlyph if simplified copy is different
if path2 != path:
glyfTable[glyphName] = tt_glyph_from_skpath(path2)
f.save(dst)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add snippet to remove overlaps on TTF with skia-pathops<commit_after>#! /usr/bin/env python3
# Example script to remove overlaps in TTF using skia-pathops
import sys
from fontTools.ttLib import TTFont
from fontTools.pens.recordingPen import DecomposingRecordingPen
from fontTools.pens.ttGlyphPen import TTGlyphPen
try:
import pathops
except ImportError:
sys.exit(
"This script requires the skia-pathops module. "
"`pip install skia-pathops` and then retry."
)
def skpath_from_simple_glyph(glyphName, glyphSet):
path = pathops.Path()
pathPen = path.getPen()
glyphSet[glyphName].draw(pathPen)
return path
def skpath_from_composite_glyph(glyphName, glyphSet):
# record TTGlyph outlines without components
dcPen = DecomposingRecordingPen(glyphSet)
glyphSet[glyphName].draw(dcPen)
# replay recording onto a skia-pathops Path
path = pathops.Path()
pathPen = path.getPen()
dcPen.replay(pathPen)
return path
def tt_glyph_from_skpath(path):
ttPen = TTGlyphPen(None)
path.draw(ttPen)
return ttPen.glyph()
def main():
if len(sys.argv) != 3:
print("usage: remove-overlaps.py fontfile.ttf outfile.ttf")
sys.exit(1)
src = sys.argv[1]
dst = sys.argv[2]
with TTFont(src) as f:
glyfTable = f["glyf"]
glyphSet = f.getGlyphSet()
for glyphName in glyphSet.keys():
if glyfTable[glyphName].isComposite():
path = skpath_from_composite_glyph(glyphName, glyphSet)
else:
path = skpath_from_simple_glyph(glyphName, glyphSet)
# duplicate path
path2 = pathops.Path(path)
# remove overlaps
path2.simplify()
# replace TTGlyph if simplified copy is different
if path2 != path:
glyfTable[glyphName] = tt_glyph_from_skpath(path2)
f.save(dst)
if __name__ == "__main__":
main()
|
|
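For context (not part of any record): a minimal sketch of what Path.simplify() does to two overlapping contours, assuming the pathops and fontTools packages are installed. The rectangle coordinates are invented for illustration.
import pathops
from fontTools.pens.recordingPen import RecordingPen
# Draw two overlapping rectangles into one skia-pathops Path.
path = pathops.Path()
pen = path.getPen()
pen.moveTo((0, 0))
pen.lineTo((10, 0))
pen.lineTo((10, 10))
pen.lineTo((0, 10))
pen.closePath()
pen.moveTo((5, 5))
pen.lineTo((15, 5))
pen.lineTo((15, 15))
pen.lineTo((5, 15))
pen.closePath()
path.simplify()  # removes the overlap, leaving a single merged contour
# Inspect the result by replaying it onto a fontTools RecordingPen.
rec = RecordingPen()
path.draw(rec)
print(rec.value)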
abe89039e48087568f0b8ccc8987e848704da4ab
|
src/pretix/__main__.py
|
src/pretix/__main__.py
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Allow to call manage.py as python -m pretix
|
Allow to call manage.py as python -m pretix
|
Python
|
apache-2.0
|
Flamacue/pretix,Flamacue/pretix,Flamacue/pretix,Flamacue/pretix
|
Allow to call manage.py as python -m pretix
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before><commit_msg>Allow to call manage.py as python -m pretix<commit_after>
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Allow to call manage.py as python -m pretix
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before><commit_msg>Allow to call manage.py as python -m pretix<commit_after>import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
|
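For context: with this __main__.py in place, python -m pretix <command> behaves like running the same Django management command through manage.py. A minimal sketch of the same dispatch done in-process; the check command is only an example.
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix.settings")
from django.core.management import execute_from_command_line
# Equivalent to invoking: python -m pretix check
execute_from_command_line([sys.argv[0], "check"])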
e6ce8e25ac819a874eb4e42087157f9cf780e0e4
|
lib/rapidsms/contrib/messagelog/tests.py
|
lib/rapidsms/contrib/messagelog/tests.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.conf import settings
import rapidsms.contrib.messagelog.app
def test_messagelog():
app = rapidsms.contrib.messagelog.app.App()
# Invoke _log, make sure it doesn't blow up regardless of Django version
app._log('I', {}, "text")
|
Add a test for contrib.messagelog's _log() method
|
Add a test for contrib.messagelog's _log() method
|
Python
|
bsd-3-clause
|
ehealthafrica-ci/rapidsms,catalpainternational/rapidsms,eHealthAfrica/rapidsms,peterayeni/rapidsms,eHealthAfrica/rapidsms,ehealthafrica-ci/rapidsms,ehealthafrica-ci/rapidsms,lsgunth/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,caktus/rapidsms,lsgunth/rapidsms,lsgunth/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,caktus/rapidsms,caktus/rapidsms,catalpainternational/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,catalpainternational/rapidsms
|
Add a test for contrib.messagelog's _log() method
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.conf import settings
import rapidsms.contrib.messagelog.app
def test_messagelog():
app = rapidsms.contrib.messagelog.app.App()
# Invoke _log, make sure it doesn't blow up regardless of Django version
app._log('I', {}, "text")
|
<commit_before><commit_msg>Add a test for contrib.messagelog's _log() method<commit_after>
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.conf import settings
import rapidsms.contrib.messagelog.app
def test_messagelog():
app = rapidsms.contrib.messagelog.app.App()
# Invoke _log, make sure it doesn't blow up regardless of Django version
app._log('I', {}, "text")
|
Add a test for contrib.messagelog's _log() method
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.conf import settings
import rapidsms.contrib.messagelog.app
def test_messagelog():
app = rapidsms.contrib.messagelog.app.App()
# Invoke _log, make sure it doesn't blow up regardless of Django version
app._log('I', {}, "text")
|
<commit_before><commit_msg>Add a test for contrib.messagelog's _log() method<commit_after>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.conf import settings
import rapidsms.contrib.messagelog.app
def test_messagelog():
app = rapidsms.contrib.messagelog.app.App()
# Invoke _log, make sure it doesn't blow up regardless of Django version
app._log('I', {}, "text")
|
|
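For context, a hedged sketch of how this smoke test could be tightened. It assumes _log returns None, which the record itself does not confirm.
import rapidsms.contrib.messagelog.app
def test_messagelog_returns_none():
    app = rapidsms.contrib.messagelog.app.App()
    # _log is assumed to return None; the assertion still exercises the call path.
    assert app._log('I', {}, "text") is None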
36f91ef53deba081cf84802a3fbfeda3b319a752
|
importio2/commands/extractor_document_generator.py
|
importio2/commands/extractor_document_generator.py
|
#
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import logging
import sys
import os
from importio2 import ExtractorAPI
from importio2 import CrawlRunAPI
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class CrawlRunMetadata(object):
def __init__(self):
pass
class ExtractorMetadata(object):
def __init__(self):
self.crawl_runs = []
class ExtractorDocumentGenerator(object):
def __init__(self):
self._filter = None
def handle_arguments(self):
parser = argparse.ArgumentParser(description="Generates Extractor Documentation")
parser.add_argument('-f', '--filter', action='store', dest='filter', metavar='regexp',
help="Filter Extractors based on Regular Expression")
args = parser.parse_args()
if args.filter is not None:
self._filter = args.filter
def get_extractor_ids(self):
api = ExtractorAPI()
extractor_list = api.list()
print(extractor_list)
for extractor in extractor_list:
print(extractor)
def generate_documentation(self):
self.get_extractor_ids()
def execute(self):
self.handle_arguments()
self.generate_documentation()
def main():
cli = ExtractorDocumentGenerator()
cli.execute()
if __name__ == '__main__':
main()
|
Add initial stub of documentation command
|
Add initial stub of documentation command
|
Python
|
apache-2.0
|
dgwartney-io/import-io-api-python,dgwartney-io/import-io-api-python
|
Add initial stub of documentation command
|
#
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import logging
import sys
import os
from importio2 import ExtractorAPI
from importio2 import CrawlRunAPI
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class CrawlRunMetadata(object):
def __init__(self):
pass
class ExtractorMetadata(object):
def __init__(self):
self.crawl_runs = []
class ExtractorDocumentGenerator(object):
def __init__(self):
self._filter = None
def handle_arguments(self):
parser = argparse.ArgumentParser(description="Generates Extractor Documentation")
parser.add_argument('-f', '--filter', action='store', dest='filter', metavar='regexp',
help="Filter Extractors based on Regular Expression")
args = parser.parse_args()
if args.filter is not None:
self._filter = args.filter
def get_extractor_ids(self):
api = ExtractorAPI()
extractor_list = api.list()
print(extractor_list)
for extractor in extractor_list:
print(extractor)
def generate_documentation(self):
self.get_extractor_ids()
def execute(self):
self.handle_arguments()
self.generate_documentation()
def main():
cli = ExtractorDocumentGenerator()
cli.execute()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add initial stub of documentation command<commit_after>
|
#
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import logging
import sys
import os
from importio2 import ExtractorAPI
from importio2 import CrawlRunAPI
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class CrawlRunMetadata(object):
def __init__(self):
pass
class ExtractorMetadata(object):
def __init__(self):
self.crawl_runs = []
class ExtractorDocumentGenerator(object):
def __init__(self):
self._filter = None
def handle_arguments(self):
parser = argparse.ArgumentParser(description="Generates Extractor Documentation")
parser.add_argument('-f', '--filter', action='store', dest='filter', metavar='regexp',
help="Filter Extractors based on Regular Expression")
args = parser.parse_args()
if args.filter is not None:
self._filter = args.filter
def get_extractor_ids(self):
api = ExtractorAPI()
extractor_list = api.list()
print(extractor_list)
for extractor in extractor_list:
print(extractor)
def generate_documentation(self):
self.get_extractor_ids()
def execute(self):
self.handle_arguments()
self.generate_documentation()
def main():
cli = ExtractorDocumentGenerator()
cli.execute()
if __name__ == '__main__':
main()
|
Add initial stub of documentation command
#
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import logging
import sys
import os
from importio2 import ExtractorAPI
from importio2 import CrawlRunAPI
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class CrawlRunMetadata(object):
def __init__(self):
pass
class ExtractorMetadata(object):
def __init__(self):
self.crawl_runs = []
class ExtractorDocumentGenerator(object):
def __init__(self):
self._filter = None
def handle_arguments(self):
parser = argparse.ArgumentParser(description="Generates Extractor Documentation")
parser.add_argument('-f', '--filter', action='store', dest='filter', metavar='regexp',
help="Filter Extractors based on Regular Expression")
args = parser.parse_args()
if args.filter is not None:
self._filter = args.filter
def get_extractor_ids(self):
api = ExtractorAPI()
extractor_list = api.list()
print(extractor_list)
for extractor in extractor_list:
print(extractor)
def generate_documentation(self):
self.get_extractor_ids()
def execute(self):
self.handle_arguments()
self.generate_documentation()
def main():
cli = ExtractorDocumentGenerator()
cli.execute()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add initial stub of documentation command<commit_after>#
# Copyright 2017 Import.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import logging
import sys
import os
from importio2 import ExtractorAPI
from importio2 import CrawlRunAPI
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
class CrawlRunMetadata(object):
def __init__(self):
pass
class ExtractorMetadata(object):
def __init__(self):
self.crawl_runs = []
class ExtractorDocumentGenerator(object):
def __init__(self):
self._filter = None
def handle_arguments(self):
parser = argparse.ArgumentParser(description="Generates Extractor Documentation")
parser.add_argument('-f', '--filter', action='store', dest='filter', metavar='regexp',
help="Filter Extractors based on Regular Expression")
args = parser.parse_args()
if args.filter is not None:
self._filter = args.filter
def get_extractor_ids(self):
api = ExtractorAPI()
extractor_list = api.list()
print(extractor_list)
for extractor in extractor_list:
print(extractor)
def generate_documentation(self):
self.get_extractor_ids()
def execute(self):
self.handle_arguments()
self.generate_documentation()
def main():
cli = ExtractorDocumentGenerator()
cli.execute()
if __name__ == '__main__':
main()
|
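For context, a minimal sketch of driving the stub from Python instead of the shell. The module path follows the file path given in the record, and the regular expression passed to --filter is invented for illustration.
import sys
from importio2.commands.extractor_document_generator import ExtractorDocumentGenerator
# execute() parses sys.argv via argparse, so set argv as if invoked from the CLI.
sys.argv = ["extractor_document_generator", "--filter", ".*product.*"]
ExtractorDocumentGenerator().execute()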